Merge branch 'dev' into epenet-20250527-1510
.github/workflows/ci.yaml (vendored)
@@ -360,7 +360,7 @@ jobs:
       - name: Run ruff
         run: |
           . venv/bin/activate
-          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
+          pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
         env:
           RUFF_OUTPUT_FORMAT: github

.pre-commit-config.yaml
@@ -1,8 +1,8 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.11.0
+    rev: v0.11.12
     hooks:
-      - id: ruff
+      - id: ruff-check
         args:
           - --fix
       - id: ruff-format
@@ -30,7 +30,7 @@ repos:
           - --branch=master
           - --branch=rc
   - repo: https://github.com/adrienverge/yamllint.git
-    rev: v1.35.1
+    rev: v1.37.1
     hooks:
       - id: yamllint
   - repo: https://github.com/pre-commit/mirrors-prettier

.vscode/tasks.json (vendored)
@@ -45,7 +45,7 @@
     {
       "label": "Ruff",
       "type": "shell",
-      "command": "pre-commit run ruff --all-files",
+      "command": "pre-commit run ruff-check --all-files",
       "group": {
         "kind": "test",
         "isDefault": true

homeassistant/components/adax/__init__.py
@@ -8,7 +8,7 @@ from homeassistant.core import HomeAssistant
 from .const import CONNECTION_TYPE, LOCAL
 from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator

-PLATFORMS = [Platform.CLIMATE]
+PLATFORMS = [Platform.CLIMATE, Platform.SENSOR]


 async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:

homeassistant/components/adax/coordinator.py
@@ -41,7 +41,30 @@ class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):

     async def _async_update_data(self) -> dict[str, dict[str, Any]]:
         """Fetch data from the Adax."""
-        rooms = await self.adax_data_handler.get_rooms() or []
+        try:
+            if hasattr(self.adax_data_handler, "fetch_rooms_info"):
+                rooms = await self.adax_data_handler.fetch_rooms_info() or []
+                _LOGGER.debug("fetch_rooms_info returned: %s", rooms)
+            else:
+                _LOGGER.debug("fetch_rooms_info method not available, using get_rooms")
+                rooms = []
+
+            if not rooms:
+                _LOGGER.debug(
+                    "No rooms from fetch_rooms_info, trying get_rooms as fallback"
+                )
+                rooms = await self.adax_data_handler.get_rooms() or []
+                _LOGGER.debug("get_rooms fallback returned: %s", rooms)
+
+            if not rooms:
+                raise UpdateFailed("No rooms available from Adax API")
+
+        except OSError as e:
+            raise UpdateFailed(f"Error communicating with API: {e}") from e
+
+        for room in rooms:
+            room["energyWh"] = int(room.get("energyWh", 0))
+
         return {r["id"]: r for r in rooms}

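The new update path prefers the library's fetch_rooms_info() (which carries the energy counter) and falls back to get_rooms(). A minimal sketch of the same duck-typing fallback, with a stubbed data handler (the stub class and its values are illustrative only, not part of the integration):

import asyncio


class _StubHandler:
    """Illustrative stand-in for the Adax cloud client."""

    async def get_rooms(self):
        return [{"id": "1", "name": "Office"}]  # older API path: no energyWh field


async def _fetch(handler) -> list[dict]:
    # Prefer fetch_rooms_info() when the installed library provides it,
    # otherwise fall back to get_rooms(), mirroring the coordinator above.
    if hasattr(handler, "fetch_rooms_info"):
        rooms = await handler.fetch_rooms_info() or []
    else:
        rooms = []
    if not rooms:
        rooms = await handler.get_rooms() or []
    for room in rooms:
        room["energyWh"] = int(room.get("energyWh", 0))
    return rooms


print(asyncio.run(_fetch(_StubHandler())))  # [{'id': '1', 'name': 'Office', 'energyWh': 0}]
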
homeassistant/components/adax/sensor.py (new file, 77 lines)
@@ -0,0 +1,77 @@
"""Support for Adax energy sensors."""

from __future__ import annotations

from typing import cast

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AdaxConfigEntry
from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AdaxConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Adax energy sensors with config flow."""
    if entry.data.get(CONNECTION_TYPE) != LOCAL:
        cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)

        # Create individual energy sensors for each device
        async_add_entities(
            AdaxEnergySensor(cloud_coordinator, device_id)
            for device_id in cloud_coordinator.data
        )


class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
    """Representation of an Adax energy sensor."""

    _attr_has_entity_name = True
    _attr_translation_key = "energy"
    _attr_device_class = SensorDeviceClass.ENERGY
    _attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
    _attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
    _attr_state_class = SensorStateClass.TOTAL_INCREASING
    _attr_suggested_display_precision = 3

    def __init__(
        self,
        coordinator: AdaxCloudCoordinator,
        device_id: str,
    ) -> None:
        """Initialize the energy sensor."""
        super().__init__(coordinator)
        self._device_id = device_id
        room = coordinator.data[device_id]

        self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device_id)},
            name=room["name"],
            manufacturer="Adax",
        )

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return (
            super().available and "energyWh" in self.coordinator.data[self._device_id]
        )

    @property
    def native_value(self) -> int:
        """Return the native value of the sensor."""
        return int(self.coordinator.data[self._device_id]["energyWh"])

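The sensor's native value stays in Wh; the suggested kWh unit and display precision of 3 only affect presentation. A small illustrative check of that conversion (the numbers are made up):

native_wh = 1534                            # what native_value returns
displayed_kwh = round(native_wh / 1000, 3)  # what the UI shows with the suggested unit
assert displayed_kwh == 1.534
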
homeassistant/components/alexa_devices/manifest.json
@@ -118,5 +118,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "bronze",
-  "requirements": ["aioamazondevices==3.0.4"]
+  "requirements": ["aioamazondevices==3.0.5"]
 }

homeassistant/components/atag/water_heater.py
@@ -6,6 +6,7 @@ from homeassistant.components.water_heater import (
     STATE_ECO,
     STATE_PERFORMANCE,
     WaterHeaterEntity,
+    WaterHeaterEntityFeature,
 )
 from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF, Platform, UnitOfTemperature
 from homeassistant.core import HomeAssistant
@@ -32,6 +33,7 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
     """Representation of an ATAG water heater."""

     _attr_operation_list = OPERATION_LIST
+    _attr_supported_features = WaterHeaterEntityFeature.TARGET_TEMPERATURE
     _attr_temperature_unit = UnitOfTemperature.CELSIUS

     @property

homeassistant/components/eq3btsmart/manifest.json
@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.15.1"]
+  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.16.0"]
 }

homeassistant/components/esphome/manifest.json
@@ -19,7 +19,7 @@
   "requirements": [
     "aioesphomeapi==31.1.0",
     "esphome-dashboard-api==1.3.0",
-    "bleak-esphome==2.15.1"
+    "bleak-esphome==2.16.0"
   ],
   "zeroconf": ["_esphomelib._tcp.local."]
 }

homeassistant/components/evohome/water_heater.py
@@ -71,6 +71,11 @@ class EvoDHW(EvoChild, WaterHeaterEntity):
     _attr_name = "DHW controller"
     _attr_icon = "mdi:thermometer-lines"
     _attr_operation_list = list(HA_STATE_TO_EVO)
+    _attr_supported_features = (
+        WaterHeaterEntityFeature.AWAY_MODE
+        | WaterHeaterEntityFeature.ON_OFF
+        | WaterHeaterEntityFeature.OPERATION_MODE
+    )
     _attr_temperature_unit = UnitOfTemperature.CELSIUS

     _evo_device: evo.HotWater
@@ -91,9 +96,6 @@ class EvoDHW(EvoChild, WaterHeaterEntity):
         self._attr_precision = (
             PRECISION_TENTHS if coordinator.client_v1 else PRECISION_WHOLE
         )
-        self._attr_supported_features = (
-            WaterHeaterEntityFeature.AWAY_MODE | WaterHeaterEntityFeature.OPERATION_MODE
-        )

     @property
     def current_operation(self) -> str | None:

homeassistant/components/freebox/sensor.py
@@ -84,6 +84,7 @@ async def async_setup_entry(
                 name=f"Freebox {sensor_name}",
                 native_unit_of_measurement=UnitOfTemperature.CELSIUS,
                 device_class=SensorDeviceClass.TEMPERATURE,
+                state_class=SensorStateClass.MEASUREMENT,
             ),
         )
         for sensor_name in router.sensors_temperature

homeassistant/components/go2rtc/manifest.json
@@ -8,6 +8,6 @@
   "integration_type": "system",
   "iot_class": "local_polling",
   "quality_scale": "internal",
-  "requirements": ["go2rtc-client==0.1.3b0"],
+  "requirements": ["go2rtc-client==0.2.1"],
   "single_config_entry": true
 }

homeassistant/components/google_mail/__init__.py
@@ -24,9 +24,11 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
-    """Set up the Google Mail platform."""
+    """Set up the Google Mail integration."""
     hass.data.setdefault(DOMAIN, {})[DATA_HASS_CONFIG] = config

+    await async_setup_services(hass)
+
     return True
@@ -52,8 +54,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -
         entry, [platform for platform in PLATFORMS if platform != Platform.NOTIFY]
     )

-    await async_setup_services(hass)
-
     return True

homeassistant/components/google_photos/__init__.py
@@ -7,17 +7,26 @@ from google_photos_library_api.api import GooglePhotosLibraryApi

 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers import config_entry_oauth2_flow
+from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.typing import ConfigType

 from . import api
 from .const import DOMAIN
 from .coordinator import GooglePhotosConfigEntry, GooglePhotosUpdateCoordinator
 from .services import async_register_services

-__all__ = [
-    "DOMAIN",
-]
+__all__ = ["DOMAIN"]
+
+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
+
+
+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up Google Photos integration."""
+
+    async_register_services(hass)
+
+    return True
+

 async def async_setup_entry(
@@ -48,8 +57,6 @@ async def async_setup_entry(
     await coordinator.async_config_entry_first_refresh()
     entry.runtime_data = coordinator

-    async_register_services(hass)
-
     return True

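Several integrations in this merge (Google Photos, Google Mail, Hue, iCloud, HomematicIP Cloud, Insteon) move service registration from async_setup_entry into a module-level async_setup, so domain services exist before any config entry loads and never need to be removed on unload. A minimal sketch of the pattern, using a hypothetical "my_domain" integration (the domain, service name, and handler are placeholders, not part of any of the integrations above):

from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

DOMAIN = "my_domain"  # hypothetical example domain

# Config-entry-only integrations need a CONFIG_SCHEMA once they define async_setup.
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


@callback
def _register_services(hass: HomeAssistant) -> None:
    """Register the domain's services (placeholder handler)."""

    async def _handle(call: ServiceCall) -> None:
        call.hass.bus.async_fire(f"{DOMAIN}_service_called")

    hass.services.async_register(DOMAIN, "do_something", _handle)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Runs once at startup, so services exist without any loaded entry."""
    _register_services(hass)
    return True
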
homeassistant/components/google_photos/services.py
@@ -152,11 +152,10 @@ def async_register_services(hass: HomeAssistant) -> None:
         }
         return None

-    if not hass.services.has_service(DOMAIN, UPLOAD_SERVICE):
-        hass.services.async_register(
-            DOMAIN,
-            UPLOAD_SERVICE,
-            async_handle_upload,
-            schema=UPLOAD_SERVICE_SCHEMA,
-            supports_response=SupportsResponse.OPTIONAL,
-        )
+    hass.services.async_register(
+        DOMAIN,
+        UPLOAD_SERVICE,
+        async_handle_upload,
+        schema=UPLOAD_SERVICE_SCHEMA,
+        supports_response=SupportsResponse.OPTIONAL,
+    )

homeassistant/components/hive/water_heater.py
@@ -73,7 +73,9 @@ async def async_setup_entry(
 class HiveWaterHeater(HiveEntity, WaterHeaterEntity):
     """Hive Water Heater Device."""

-    _attr_supported_features = WaterHeaterEntityFeature.OPERATION_MODE
+    _attr_supported_features = (
+        WaterHeaterEntityFeature.ON_OFF | WaterHeaterEntityFeature.OPERATION_MODE
+    )
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
     _attr_operation_list = SUPPORT_WATER_HEATER

homeassistant/components/homekit_controller/manifest.json
@@ -14,6 +14,6 @@
   "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
   "iot_class": "local_push",
   "loggers": ["aiohomekit", "commentjson"],
-  "requirements": ["aiohomekit==3.2.14"],
+  "requirements": ["aiohomekit==3.2.15"],
   "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
 }

homeassistant/components/homematicip_cloud/__init__.py
@@ -21,7 +21,7 @@ from .const import (
     HMIPC_NAME,
 )
 from .hap import HomematicIPConfigEntry, HomematicipHAP
-from .services import async_setup_services, async_unload_services
+from .services import async_setup_services

 CONFIG_SCHEMA = vol.Schema(
     {
@@ -63,6 +63,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
             )
         )

+    await async_setup_services(hass)
+
     return True
@@ -83,7 +85,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomematicIPConfigEntry)
     if not await hap.async_setup():
         return False

-    await async_setup_services(hass)
     _async_remove_obsolete_entities(hass, entry, hap)

     # Register on HA stop event to gracefully shutdown HomematicIP Cloud connection
@@ -115,8 +116,6 @@ async def async_unload_entry(
     assert hap.reset_connection_listener is not None
     hap.reset_connection_listener()

-    await async_unload_services(hass)
-
     return await hap.async_reset()

homeassistant/components/homematicip_cloud/services.py
@@ -123,32 +123,29 @@ SCHEMA_SET_HOME_COOLING_MODE = vol.Schema(
 async def async_setup_services(hass: HomeAssistant) -> None:
     """Set up the HomematicIP Cloud services."""

-    if hass.services.async_services_for_domain(DOMAIN):
-        return
-
     @verify_domain_control(hass, DOMAIN)
     async def async_call_hmipc_service(service: ServiceCall) -> None:
         """Call correct HomematicIP Cloud service."""
         service_name = service.service

         if service_name == SERVICE_ACTIVATE_ECO_MODE_WITH_DURATION:
-            await _async_activate_eco_mode_with_duration(hass, service)
+            await _async_activate_eco_mode_with_duration(service)
         elif service_name == SERVICE_ACTIVATE_ECO_MODE_WITH_PERIOD:
-            await _async_activate_eco_mode_with_period(hass, service)
+            await _async_activate_eco_mode_with_period(service)
         elif service_name == SERVICE_ACTIVATE_VACATION:
-            await _async_activate_vacation(hass, service)
+            await _async_activate_vacation(service)
         elif service_name == SERVICE_DEACTIVATE_ECO_MODE:
-            await _async_deactivate_eco_mode(hass, service)
+            await _async_deactivate_eco_mode(service)
         elif service_name == SERVICE_DEACTIVATE_VACATION:
-            await _async_deactivate_vacation(hass, service)
+            await _async_deactivate_vacation(service)
         elif service_name == SERVICE_DUMP_HAP_CONFIG:
-            await _async_dump_hap_config(hass, service)
+            await _async_dump_hap_config(service)
         elif service_name == SERVICE_RESET_ENERGY_COUNTER:
-            await _async_reset_energy_counter(hass, service)
+            await _async_reset_energy_counter(service)
         elif service_name == SERVICE_SET_ACTIVE_CLIMATE_PROFILE:
-            await _set_active_climate_profile(hass, service)
+            await _set_active_climate_profile(service)
         elif service_name == SERVICE_SET_HOME_COOLING_MODE:
-            await _async_set_home_cooling_mode(hass, service)
+            await _async_set_home_cooling_mode(service)

     hass.services.async_register(
         domain=DOMAIN,
@@ -217,90 +214,75 @@ async def async_setup_services(hass: HomeAssistant) -> None:
     )


-async def async_unload_services(hass: HomeAssistant):
-    """Unload HomematicIP Cloud services."""
-    if hass.config_entries.async_loaded_entries(DOMAIN):
-        return
-
-    for hmipc_service in HMIPC_SERVICES:
-        hass.services.async_remove(domain=DOMAIN, service=hmipc_service)
-
-
-async def _async_activate_eco_mode_with_duration(
-    hass: HomeAssistant, service: ServiceCall
-) -> None:
+async def _async_activate_eco_mode_with_duration(service: ServiceCall) -> None:
     """Service to activate eco mode with duration."""
     duration = service.data[ATTR_DURATION]

     if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
-        if home := _get_home(hass, hapid):
+        if home := _get_home(service.hass, hapid):
             await home.activate_absence_with_duration_async(duration)
     else:
         entry: HomematicIPConfigEntry
-        for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+        for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
             await entry.runtime_data.home.activate_absence_with_duration_async(duration)


-async def _async_activate_eco_mode_with_period(
-    hass: HomeAssistant, service: ServiceCall
-) -> None:
+async def _async_activate_eco_mode_with_period(service: ServiceCall) -> None:
     """Service to activate eco mode with period."""
     endtime = service.data[ATTR_ENDTIME]

     if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
-        if home := _get_home(hass, hapid):
+        if home := _get_home(service.hass, hapid):
             await home.activate_absence_with_period_async(endtime)
     else:
         entry: HomematicIPConfigEntry
-        for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+        for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
             await entry.runtime_data.home.activate_absence_with_period_async(endtime)


-async def _async_activate_vacation(hass: HomeAssistant, service: ServiceCall) -> None:
+async def _async_activate_vacation(service: ServiceCall) -> None:
     """Service to activate vacation."""
     endtime = service.data[ATTR_ENDTIME]
     temperature = service.data[ATTR_TEMPERATURE]

     if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
-        if home := _get_home(hass, hapid):
+        if home := _get_home(service.hass, hapid):
             await home.activate_vacation_async(endtime, temperature)
     else:
         entry: HomematicIPConfigEntry
-        for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+        for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
             await entry.runtime_data.home.activate_vacation_async(endtime, temperature)


-async def _async_deactivate_eco_mode(hass: HomeAssistant, service: ServiceCall) -> None:
+async def _async_deactivate_eco_mode(service: ServiceCall) -> None:
     """Service to deactivate eco mode."""
     if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
-        if home := _get_home(hass, hapid):
+        if home := _get_home(service.hass, hapid):
             await home.deactivate_absence_async()
     else:
         entry: HomematicIPConfigEntry
-        for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+        for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
             await entry.runtime_data.home.deactivate_absence_async()


-async def _async_deactivate_vacation(hass: HomeAssistant, service: ServiceCall) -> None:
+async def _async_deactivate_vacation(service: ServiceCall) -> None:
     """Service to deactivate vacation."""
     if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
-        if home := _get_home(hass, hapid):
+        if home := _get_home(service.hass, hapid):
             await home.deactivate_vacation_async()
     else:
         entry: HomematicIPConfigEntry
-        for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+        for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
             await entry.runtime_data.home.deactivate_vacation_async()


-async def _set_active_climate_profile(
-    hass: HomeAssistant, service: ServiceCall
-) -> None:
+async def _set_active_climate_profile(service: ServiceCall) -> None:
     """Service to set the active climate profile."""
     entity_id_list = service.data[ATTR_ENTITY_ID]
     climate_profile_index = service.data[ATTR_CLIMATE_PROFILE_INDEX] - 1

     entry: HomematicIPConfigEntry
-    for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+    for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
         if entity_id_list != "all":
             for entity_id in entity_id_list:
                 group = entry.runtime_data.hmip_device_by_entity_id.get(entity_id)
@@ -312,16 +294,16 @@ async def _set_active_climate_profile(
                 await group.set_active_profile_async(climate_profile_index)


-async def _async_dump_hap_config(hass: HomeAssistant, service: ServiceCall) -> None:
+async def _async_dump_hap_config(service: ServiceCall) -> None:
     """Service to dump the configuration of a Homematic IP Access Point."""
     config_path: str = (
-        service.data.get(ATTR_CONFIG_OUTPUT_PATH) or hass.config.config_dir
+        service.data.get(ATTR_CONFIG_OUTPUT_PATH) or service.hass.config.config_dir
     )
     config_file_prefix = service.data[ATTR_CONFIG_OUTPUT_FILE_PREFIX]
     anonymize = service.data[ATTR_ANONYMIZE]

     entry: HomematicIPConfigEntry
-    for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+    for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
         hap_sgtin = entry.unique_id
         assert hap_sgtin is not None
@@ -338,12 +320,12 @@ async def _async_dump_hap_config(hass: HomeAssistant, service: ServiceCall) -> N
         config_file.write_text(json_state, encoding="utf8")


-async def _async_reset_energy_counter(hass: HomeAssistant, service: ServiceCall):
+async def _async_reset_energy_counter(service: ServiceCall):
     """Service to reset the energy counter."""
     entity_id_list = service.data[ATTR_ENTITY_ID]

     entry: HomematicIPConfigEntry
-    for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+    for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
         if entity_id_list != "all":
             for entity_id in entity_id_list:
                 device = entry.runtime_data.hmip_device_by_entity_id.get(entity_id)
@@ -355,16 +337,16 @@ async def _async_reset_energy_counter(hass: HomeAssistant, service: ServiceCall)
         await device.reset_energy_counter_async()


-async def _async_set_home_cooling_mode(hass: HomeAssistant, service: ServiceCall):
+async def _async_set_home_cooling_mode(service: ServiceCall):
     """Service to set the cooling mode."""
     cooling = service.data[ATTR_COOLING]

     if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
-        if home := _get_home(hass, hapid):
+        if home := _get_home(service.hass, hapid):
             await home.set_cooling_async(cooling)
     else:
         entry: HomematicIPConfigEntry
-        for entry in hass.config_entries.async_loaded_entries(DOMAIN):
+        for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
             await entry.runtime_data.home.set_cooling_async(cooling)

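The HomematicIP helpers drop their explicit hass parameter because a ServiceCall already carries a reference to the running Home Assistant instance as service.hass. A minimal sketch of a handler written in that style (domain name and handler are placeholders, not code from the integration):

from homeassistant.core import HomeAssistant, ServiceCall


async def _async_example_handler(service: ServiceCall) -> None:
    """Handler that reaches Home Assistant state through the call object."""
    hass: HomeAssistant = service.hass  # no separate hass argument needed
    for entry in hass.config_entries.async_loaded_entries("example_domain"):
        # ... operate on entry.runtime_data here ...
        pass
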
homeassistant/components/homewizard/manifest.json
@@ -12,6 +12,6 @@
   "iot_class": "local_polling",
   "loggers": ["homewizard_energy"],
   "quality_scale": "platinum",
-  "requirements": ["python-homewizard-energy==v8.3.2"],
+  "requirements": ["python-homewizard-energy==8.3.3"],
   "zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
 }

homeassistant/components/hue/__init__.py
@@ -5,13 +5,24 @@ from aiohue.util import normalize_bridge_id
 from homeassistant.components import persistent_notification
 from homeassistant.config_entries import SOURCE_IGNORE
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import device_registry as dr
+from homeassistant.helpers import config_validation as cv, device_registry as dr
+from homeassistant.helpers.typing import ConfigType

 from .bridge import HueBridge, HueConfigEntry
-from .const import DOMAIN, SERVICE_HUE_ACTIVATE_SCENE
+from .const import DOMAIN
 from .migration import check_migration
 from .services import async_register_services

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
+
+
+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up Hue integration."""
+
+    async_register_services(hass)
+
+    return True
+

 async def async_setup_entry(hass: HomeAssistant, entry: HueConfigEntry) -> bool:
     """Set up a bridge from a config entry."""
@@ -23,9 +34,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HueConfigEntry) -> bool:
     if not await bridge.async_initialize_bridge():
         return False

-    # register Hue domain services
-    async_register_services(hass)
-
     api = bridge.api

     # For backwards compat
@@ -106,7 +114,4 @@ async def async_setup_entry(hass: HomeAssistant, entry: HueConfigEntry) -> bool:

 async def async_unload_entry(hass: HomeAssistant, entry: HueConfigEntry) -> bool:
     """Unload a config entry."""
-    unload_success = await entry.runtime_data.async_reset()
-    if not hass.config_entries.async_loaded_entries(DOMAIN):
-        hass.services.async_remove(DOMAIN, SERVICE_HUE_ACTIVATE_SCENE)
-    return unload_success
+    return await entry.runtime_data.async_reset()

homeassistant/components/hue/services.py
@@ -59,21 +59,20 @@ def async_register_services(hass: HomeAssistant) -> None:
             group_name,
         )

-    if not hass.services.has_service(DOMAIN, SERVICE_HUE_ACTIVATE_SCENE):
-        # Register a local handler for scene activation
-        hass.services.async_register(
-            DOMAIN,
-            SERVICE_HUE_ACTIVATE_SCENE,
-            verify_domain_control(hass, DOMAIN)(hue_activate_scene),
-            schema=vol.Schema(
-                {
-                    vol.Required(ATTR_GROUP_NAME): cv.string,
-                    vol.Required(ATTR_SCENE_NAME): cv.string,
-                    vol.Optional(ATTR_TRANSITION): cv.positive_int,
-                    vol.Optional(ATTR_DYNAMIC): cv.boolean,
-                }
-            ),
-        )
+    # Register a local handler for scene activation
+    hass.services.async_register(
+        DOMAIN,
+        SERVICE_HUE_ACTIVATE_SCENE,
+        verify_domain_control(hass, DOMAIN)(hue_activate_scene),
+        schema=vol.Schema(
+            {
+                vol.Required(ATTR_GROUP_NAME): cv.string,
+                vol.Required(ATTR_SCENE_NAME): cv.string,
+                vol.Optional(ATTR_TRANSITION): cv.positive_int,
+                vol.Optional(ATTR_DYNAMIC): cv.boolean,
+            }
+        ),
+    )


 async def hue_activate_scene_v1(

homeassistant/components/icloud/__init__.py
@@ -6,19 +6,32 @@ from typing import Any

 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.storage import Store
+from homeassistant.helpers.typing import ConfigType

 from .account import IcloudAccount, IcloudConfigEntry
 from .const import (
     CONF_GPS_ACCURACY_THRESHOLD,
     CONF_MAX_INTERVAL,
     CONF_WITH_FAMILY,
+    DOMAIN,
     PLATFORMS,
     STORAGE_KEY,
     STORAGE_VERSION,
 )
 from .services import register_services

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
+
+
+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up iCloud integration."""
+
+    register_services(hass)
+
+    return True
+

 async def async_setup_entry(hass: HomeAssistant, entry: IcloudConfigEntry) -> bool:
     """Set up an iCloud account from a config entry."""
@@ -51,8 +64,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: IcloudConfigEntry) -> bo

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

-    register_services(hass)
-
     return True

homeassistant/components/immich/media_source.py
@@ -153,49 +153,40 @@ class ImmichMediaSource(MediaSource):
         except ImmichError:
             return []

-        ret = [
-            BrowseMediaSource(
-                domain=DOMAIN,
-                identifier=(
-                    f"{identifier.unique_id}|albums|"
-                    f"{identifier.collection_id}|"
-                    f"{asset.asset_id}|"
-                    f"{asset.original_file_name}|"
-                    f"{mime_type}"
-                ),
-                media_class=MediaClass.IMAGE,
-                media_content_type=mime_type,
-                title=asset.original_file_name,
-                can_play=False,
-                can_expand=False,
-                thumbnail=f"/immich/{identifier.unique_id}/{asset.asset_id}/thumbnail/{mime_type}",
-            )
-            for asset in album_info.assets
-            if (mime_type := asset.original_mime_type)
-            and mime_type.startswith("image/")
-        ]
-
-        ret.extend(
-            BrowseMediaSource(
-                domain=DOMAIN,
-                identifier=(
-                    f"{identifier.unique_id}|albums|"
-                    f"{identifier.collection_id}|"
-                    f"{asset.asset_id}|"
-                    f"{asset.original_file_name}|"
-                    f"{mime_type}"
-                ),
-                media_class=MediaClass.VIDEO,
-                media_content_type=mime_type,
-                title=asset.original_file_name,
-                can_play=True,
-                can_expand=False,
-                thumbnail=f"/immich/{identifier.unique_id}/{asset.asset_id}/thumbnail/image/jpeg",
-            )
-            for asset in album_info.assets
-            if (mime_type := asset.original_mime_type)
-            and mime_type.startswith("video/")
-        )
+        ret: list[BrowseMediaSource] = []
+        for asset in album_info.assets:
+            if not (mime_type := asset.original_mime_type) or not mime_type.startswith(
+                ("image/", "video/")
+            ):
+                continue
+
+            if mime_type.startswith("image/"):
+                media_class = MediaClass.IMAGE
+                can_play = False
+                thumb_mime_type = mime_type
+            else:
+                media_class = MediaClass.VIDEO
+                can_play = True
+                thumb_mime_type = "image/jpeg"
+
+            ret.append(
+                BrowseMediaSource(
+                    domain=DOMAIN,
+                    identifier=(
+                        f"{identifier.unique_id}|albums|"
+                        f"{identifier.collection_id}|"
+                        f"{asset.asset_id}|"
+                        f"{asset.original_file_name}|"
+                        f"{mime_type}"
+                    ),
+                    media_class=media_class,
+                    media_content_type=mime_type,
+                    title=asset.original_file_name,
+                    can_play=can_play,
+                    can_expand=False,
+                    thumbnail=f"/immich/{identifier.unique_id}/{asset.asset_id}/thumbnail/{thumb_mime_type}",
+                )
+            )

         return ret

homeassistant/components/insteon/__init__.py
@@ -25,9 +25,9 @@ from .const import (
     DOMAIN,
     INSTEON_PLATFORMS,
 )
+from .services import async_register_services
 from .utils import (
     add_insteon_events,
-    async_register_services,
     get_device_platforms,
     register_new_device_callback,
 )

homeassistant/components/insteon/services.py (new file, 291 lines)
@@ -0,0 +1,291 @@
"""Utilities used by insteon component."""

from __future__ import annotations

import asyncio
import logging

from pyinsteon import devices
from pyinsteon.address import Address
from pyinsteon.managers.link_manager import (
    async_enter_linking_mode,
    async_enter_unlinking_mode,
)
from pyinsteon.managers.scene_manager import (
    async_trigger_scene_off,
    async_trigger_scene_on,
)
from pyinsteon.managers.x10_manager import (
    async_x10_all_lights_off,
    async_x10_all_lights_on,
    async_x10_all_units_off,
)
from pyinsteon.x10_address import create as create_x10_address

from homeassistant.const import (
    CONF_ADDRESS,
    CONF_ENTITY_ID,
    CONF_PLATFORM,
    ENTITY_MATCH_ALL,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.dispatcher import (
    async_dispatcher_connect,
    async_dispatcher_send,
    dispatcher_send,
)

from .const import (
    CONF_CAT,
    CONF_DIM_STEPS,
    CONF_HOUSECODE,
    CONF_SUBCAT,
    CONF_UNITCODE,
    DOMAIN,
    SIGNAL_ADD_DEFAULT_LINKS,
    SIGNAL_ADD_DEVICE_OVERRIDE,
    SIGNAL_ADD_X10_DEVICE,
    SIGNAL_LOAD_ALDB,
    SIGNAL_PRINT_ALDB,
    SIGNAL_REMOVE_DEVICE_OVERRIDE,
    SIGNAL_REMOVE_ENTITY,
    SIGNAL_REMOVE_HA_DEVICE,
    SIGNAL_REMOVE_INSTEON_DEVICE,
    SIGNAL_REMOVE_X10_DEVICE,
    SIGNAL_SAVE_DEVICES,
    SRV_ADD_ALL_LINK,
    SRV_ADD_DEFAULT_LINKS,
    SRV_ALL_LINK_GROUP,
    SRV_ALL_LINK_MODE,
    SRV_CONTROLLER,
    SRV_DEL_ALL_LINK,
    SRV_HOUSECODE,
    SRV_LOAD_ALDB,
    SRV_LOAD_DB_RELOAD,
    SRV_PRINT_ALDB,
    SRV_PRINT_IM_ALDB,
    SRV_SCENE_OFF,
    SRV_SCENE_ON,
    SRV_X10_ALL_LIGHTS_OFF,
    SRV_X10_ALL_LIGHTS_ON,
    SRV_X10_ALL_UNITS_OFF,
)
from .schemas import (
    ADD_ALL_LINK_SCHEMA,
    ADD_DEFAULT_LINKS_SCHEMA,
    DEL_ALL_LINK_SCHEMA,
    LOAD_ALDB_SCHEMA,
    PRINT_ALDB_SCHEMA,
    TRIGGER_SCENE_SCHEMA,
    X10_HOUSECODE_SCHEMA,
)
from .utils import print_aldb_to_log

_LOGGER = logging.getLogger(__name__)


@callback
def async_register_services(hass: HomeAssistant) -> None:  # noqa: C901
    """Register services used by insteon component."""

    save_lock = asyncio.Lock()

    async def async_srv_add_all_link(service: ServiceCall) -> None:
        """Add an INSTEON All-Link between two devices."""
        group = service.data[SRV_ALL_LINK_GROUP]
        mode = service.data[SRV_ALL_LINK_MODE]
        link_mode = mode.lower() == SRV_CONTROLLER
        await async_enter_linking_mode(link_mode, group)

    async def async_srv_del_all_link(service: ServiceCall) -> None:
        """Delete an INSTEON All-Link between two devices."""
        group = service.data.get(SRV_ALL_LINK_GROUP)
        await async_enter_unlinking_mode(group)

    async def async_srv_load_aldb(service: ServiceCall) -> None:
        """Load the device All-Link database."""
        entity_id = service.data[CONF_ENTITY_ID]
        reload = service.data[SRV_LOAD_DB_RELOAD]
        if entity_id.lower() == ENTITY_MATCH_ALL:
            await async_srv_load_aldb_all(reload)
        else:
            signal = f"{entity_id}_{SIGNAL_LOAD_ALDB}"
            async_dispatcher_send(hass, signal, reload)

    async def async_srv_load_aldb_all(reload):
        """Load the All-Link database for all devices."""
        # Cannot be done concurrently due to issues with the underlying protocol.
        for address in devices:
            device = devices[address]
            if device != devices.modem and device.cat != 0x03:
                await device.aldb.async_load(refresh=reload)
                await async_srv_save_devices()

    async def async_srv_save_devices():
        """Write the Insteon device configuration to file."""
        async with save_lock:
            _LOGGER.debug("Saving Insteon devices")
            await devices.async_save(hass.config.config_dir)

    def print_aldb(service: ServiceCall) -> None:
        """Print the All-Link Database for a device."""
        # For now this sends logs to the log file.
        # Future direction is to create an INSTEON control panel.
        entity_id = service.data[CONF_ENTITY_ID]
        signal = f"{entity_id}_{SIGNAL_PRINT_ALDB}"
        dispatcher_send(hass, signal)

    def print_im_aldb(service: ServiceCall) -> None:
        """Print the All-Link Database for a device."""
        # For now this sends logs to the log file.
        # Future direction is to create an INSTEON control panel.
        print_aldb_to_log(devices.modem.aldb)

    async def async_srv_x10_all_units_off(service: ServiceCall) -> None:
        """Send the X10 All Units Off command."""
        housecode = service.data.get(SRV_HOUSECODE)
        await async_x10_all_units_off(housecode)

    async def async_srv_x10_all_lights_off(service: ServiceCall) -> None:
        """Send the X10 All Lights Off command."""
        housecode = service.data.get(SRV_HOUSECODE)
        await async_x10_all_lights_off(housecode)

    async def async_srv_x10_all_lights_on(service: ServiceCall) -> None:
        """Send the X10 All Lights On command."""
        housecode = service.data.get(SRV_HOUSECODE)
        await async_x10_all_lights_on(housecode)

    async def async_srv_scene_on(service: ServiceCall) -> None:
        """Trigger an INSTEON scene ON."""
        group = service.data.get(SRV_ALL_LINK_GROUP)
        await async_trigger_scene_on(group)

    async def async_srv_scene_off(service: ServiceCall) -> None:
        """Trigger an INSTEON scene ON."""
        group = service.data.get(SRV_ALL_LINK_GROUP)
        await async_trigger_scene_off(group)

    @callback
    def async_add_default_links(service: ServiceCall) -> None:
        """Add the default All-Link entries to a device."""
        entity_id = service.data[CONF_ENTITY_ID]
        signal = f"{entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}"
        async_dispatcher_send(hass, signal)

    async def async_add_device_override(override):
        """Remove an Insten device and associated entities."""
        address = Address(override[CONF_ADDRESS])
        await async_remove_ha_device(address)
        devices.set_id(address, override[CONF_CAT], override[CONF_SUBCAT], 0)
        await async_srv_save_devices()

    async def async_remove_device_override(address):
        """Remove an Insten device and associated entities."""
        address = Address(address)
        await async_remove_ha_device(address)
        devices.set_id(address, None, None, None)
        await devices.async_identify_device(address)
        await async_srv_save_devices()

    @callback
    def async_add_x10_device(x10_config):
        """Add X10 device."""
        housecode = x10_config[CONF_HOUSECODE]
        unitcode = x10_config[CONF_UNITCODE]
        platform = x10_config[CONF_PLATFORM]
        steps = x10_config.get(CONF_DIM_STEPS, 22)
        x10_type = "on_off"
        if platform == "light":
            x10_type = "dimmable"
        elif platform == "binary_sensor":
            x10_type = "sensor"
        _LOGGER.debug(
            "Adding X10 device to Insteon: %s %d %s", housecode, unitcode, x10_type
        )
        # This must be run in the event loop
        devices.add_x10_device(housecode, unitcode, x10_type, steps)

    async def async_remove_x10_device(housecode, unitcode):
        """Remove an X10 device and associated entities."""
        address = create_x10_address(housecode, unitcode)
        devices.pop(address)
        await async_remove_ha_device(address)

    async def async_remove_ha_device(address: Address, remove_all_refs: bool = False):
        """Remove the device and all entities from hass."""
        signal = f"{address.id}_{SIGNAL_REMOVE_ENTITY}"
        async_dispatcher_send(hass, signal)
        dev_registry = dr.async_get(hass)
        device = dev_registry.async_get_device(identifiers={(DOMAIN, str(address))})
        if device:
            dev_registry.async_remove_device(device.id)

    async def async_remove_insteon_device(
        address: Address, remove_all_refs: bool = False
    ):
        """Remove the underlying Insteon device from the network."""
        await devices.async_remove_device(
            address=address, force=False, remove_all_refs=remove_all_refs
        )
        await async_srv_save_devices()

    hass.services.async_register(
        DOMAIN, SRV_ADD_ALL_LINK, async_srv_add_all_link, schema=ADD_ALL_LINK_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SRV_DEL_ALL_LINK, async_srv_del_all_link, schema=DEL_ALL_LINK_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SRV_LOAD_ALDB, async_srv_load_aldb, schema=LOAD_ALDB_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SRV_PRINT_ALDB, print_aldb, schema=PRINT_ALDB_SCHEMA
    )
    hass.services.async_register(DOMAIN, SRV_PRINT_IM_ALDB, print_im_aldb, schema=None)
    hass.services.async_register(
        DOMAIN,
        SRV_X10_ALL_UNITS_OFF,
        async_srv_x10_all_units_off,
        schema=X10_HOUSECODE_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN,
        SRV_X10_ALL_LIGHTS_OFF,
        async_srv_x10_all_lights_off,
        schema=X10_HOUSECODE_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN,
        SRV_X10_ALL_LIGHTS_ON,
        async_srv_x10_all_lights_on,
        schema=X10_HOUSECODE_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN, SRV_SCENE_ON, async_srv_scene_on, schema=TRIGGER_SCENE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SRV_SCENE_OFF, async_srv_scene_off, schema=TRIGGER_SCENE_SCHEMA
    )

    hass.services.async_register(
        DOMAIN,
        SRV_ADD_DEFAULT_LINKS,
        async_add_default_links,
        schema=ADD_DEFAULT_LINKS_SCHEMA,
    )
    async_dispatcher_connect(hass, SIGNAL_SAVE_DEVICES, async_srv_save_devices)
    async_dispatcher_connect(
        hass, SIGNAL_ADD_DEVICE_OVERRIDE, async_add_device_override
    )
    async_dispatcher_connect(
        hass, SIGNAL_REMOVE_DEVICE_OVERRIDE, async_remove_device_override
    )
    async_dispatcher_connect(hass, SIGNAL_ADD_X10_DEVICE, async_add_x10_device)
    async_dispatcher_connect(hass, SIGNAL_REMOVE_X10_DEVICE, async_remove_x10_device)
    async_dispatcher_connect(hass, SIGNAL_REMOVE_HA_DEVICE, async_remove_ha_device)
    async_dispatcher_connect(
        hass, SIGNAL_REMOVE_INSTEON_DEVICE, async_remove_insteon_device
    )
    _LOGGER.debug("Insteon Services registered")

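With registration centralized in async_register_services, the services can be invoked through the normal Home Assistant service API. A small illustrative call (assumes the Insteon integration is set up; the constants come from the imports shown above and resolve to the public service name):

from homeassistant.components.insteon.const import DOMAIN, SRV_PRINT_IM_ALDB
from homeassistant.core import HomeAssistant


async def dump_modem_aldb(hass: HomeAssistant) -> None:
    """Ask Insteon to print the modem's All-Link database to the log."""
    await hass.services.async_call(DOMAIN, SRV_PRINT_IM_ALDB, {}, blocking=True)
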
@@ -2,7 +2,6 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
|
||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
import logging
|
import logging
|
||||||
from typing import TYPE_CHECKING, Any
|
from typing import TYPE_CHECKING, Any
|
||||||
@@ -12,90 +11,25 @@ from pyinsteon.address import Address
 from pyinsteon.constants import ALDBStatus, DeviceAction
 from pyinsteon.device_types.device_base import Device
 from pyinsteon.events import OFF_EVENT, OFF_FAST_EVENT, ON_EVENT, ON_FAST_EVENT, Event
-from pyinsteon.managers.link_manager import (
-    async_enter_linking_mode,
-    async_enter_unlinking_mode,
-)
-from pyinsteon.managers.scene_manager import (
-    async_trigger_scene_off,
-    async_trigger_scene_on,
-)
-from pyinsteon.managers.x10_manager import (
-    async_x10_all_lights_off,
-    async_x10_all_lights_on,
-    async_x10_all_units_off,
-)
-from pyinsteon.x10_address import create as create_x10_address
 from serial.tools import list_ports

 from homeassistant.components import usb
-from homeassistant.const import (
-    CONF_ADDRESS,
-    CONF_ENTITY_ID,
-    CONF_PLATFORM,
-    ENTITY_MATCH_ALL,
-    Platform,
-)
-from homeassistant.core import HomeAssistant, ServiceCall, callback
+from homeassistant.const import CONF_ADDRESS, Platform
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import device_registry as dr
-from homeassistant.helpers.dispatcher import (
-    async_dispatcher_connect,
-    async_dispatcher_send,
-    dispatcher_send,
-)
+from homeassistant.helpers.dispatcher import dispatcher_send
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .const import (
-    CONF_CAT,
-    CONF_DIM_STEPS,
-    CONF_HOUSECODE,
-    CONF_SUBCAT,
-    CONF_UNITCODE,
     DOMAIN,
     EVENT_CONF_BUTTON,
     EVENT_GROUP_OFF,
     EVENT_GROUP_OFF_FAST,
     EVENT_GROUP_ON,
     EVENT_GROUP_ON_FAST,
-    SIGNAL_ADD_DEFAULT_LINKS,
-    SIGNAL_ADD_DEVICE_OVERRIDE,
     SIGNAL_ADD_ENTITIES,
-    SIGNAL_ADD_X10_DEVICE,
-    SIGNAL_LOAD_ALDB,
-    SIGNAL_PRINT_ALDB,
-    SIGNAL_REMOVE_DEVICE_OVERRIDE,
-    SIGNAL_REMOVE_ENTITY,
-    SIGNAL_REMOVE_HA_DEVICE,
-    SIGNAL_REMOVE_INSTEON_DEVICE,
-    SIGNAL_REMOVE_X10_DEVICE,
-    SIGNAL_SAVE_DEVICES,
-    SRV_ADD_ALL_LINK,
-    SRV_ADD_DEFAULT_LINKS,
-    SRV_ALL_LINK_GROUP,
-    SRV_ALL_LINK_MODE,
-    SRV_CONTROLLER,
-    SRV_DEL_ALL_LINK,
-    SRV_HOUSECODE,
-    SRV_LOAD_ALDB,
-    SRV_LOAD_DB_RELOAD,
-    SRV_PRINT_ALDB,
-    SRV_PRINT_IM_ALDB,
-    SRV_SCENE_OFF,
-    SRV_SCENE_ON,
-    SRV_X10_ALL_LIGHTS_OFF,
-    SRV_X10_ALL_LIGHTS_ON,
-    SRV_X10_ALL_UNITS_OFF,
 )
 from .ipdb import get_device_platform_groups, get_device_platforms
-from .schemas import (
-    ADD_ALL_LINK_SCHEMA,
-    ADD_DEFAULT_LINKS_SCHEMA,
-    DEL_ALL_LINK_SCHEMA,
-    LOAD_ALDB_SCHEMA,
-    PRINT_ALDB_SCHEMA,
-    TRIGGER_SCENE_SCHEMA,
-    X10_HOUSECODE_SCHEMA,
-)

 if TYPE_CHECKING:
     from .entity import InsteonEntity
@@ -154,7 +88,7 @@ def add_insteon_events(hass: HomeAssistant, device: Device) -> None:
         _register_event(event, async_fire_insteon_event)


-def register_new_device_callback(hass):
+def register_new_device_callback(hass: HomeAssistant) -> None:
     """Register callback for new Insteon device."""

     @callback
@@ -180,212 +114,6 @@ def register_new_device_callback(hass):
     devices.subscribe(async_new_insteon_device, force_strong_ref=True)


-@callback
-def async_register_services(hass):  # noqa: C901
-    """Register services used by insteon component."""
-
-    save_lock = asyncio.Lock()
-
-    async def async_srv_add_all_link(service: ServiceCall) -> None:
-        """Add an INSTEON All-Link between two devices."""
-        group = service.data[SRV_ALL_LINK_GROUP]
-        mode = service.data[SRV_ALL_LINK_MODE]
-        link_mode = mode.lower() == SRV_CONTROLLER
-        await async_enter_linking_mode(link_mode, group)
-
-    async def async_srv_del_all_link(service: ServiceCall) -> None:
-        """Delete an INSTEON All-Link between two devices."""
-        group = service.data.get(SRV_ALL_LINK_GROUP)
-        await async_enter_unlinking_mode(group)
-
-    async def async_srv_load_aldb(service: ServiceCall) -> None:
-        """Load the device All-Link database."""
-        entity_id = service.data[CONF_ENTITY_ID]
-        reload = service.data[SRV_LOAD_DB_RELOAD]
-        if entity_id.lower() == ENTITY_MATCH_ALL:
-            await async_srv_load_aldb_all(reload)
-        else:
-            signal = f"{entity_id}_{SIGNAL_LOAD_ALDB}"
-            async_dispatcher_send(hass, signal, reload)
-
-    async def async_srv_load_aldb_all(reload):
-        """Load the All-Link database for all devices."""
-        # Cannot be done concurrently due to issues with the underlying protocol.
-        for address in devices:
-            device = devices[address]
-            if device != devices.modem and device.cat != 0x03:
-                await device.aldb.async_load(refresh=reload)
-                await async_srv_save_devices()
-
-    async def async_srv_save_devices():
-        """Write the Insteon device configuration to file."""
-        async with save_lock:
-            _LOGGER.debug("Saving Insteon devices")
-            await devices.async_save(hass.config.config_dir)
-
-    def print_aldb(service: ServiceCall) -> None:
-        """Print the All-Link Database for a device."""
-        # For now this sends logs to the log file.
-        # Future direction is to create an INSTEON control panel.
-        entity_id = service.data[CONF_ENTITY_ID]
-        signal = f"{entity_id}_{SIGNAL_PRINT_ALDB}"
-        dispatcher_send(hass, signal)
-
-    def print_im_aldb(service: ServiceCall) -> None:
-        """Print the All-Link Database for a device."""
-        # For now this sends logs to the log file.
-        # Future direction is to create an INSTEON control panel.
-        print_aldb_to_log(devices.modem.aldb)
-
-    async def async_srv_x10_all_units_off(service: ServiceCall) -> None:
-        """Send the X10 All Units Off command."""
-        housecode = service.data.get(SRV_HOUSECODE)
-        await async_x10_all_units_off(housecode)
-
-    async def async_srv_x10_all_lights_off(service: ServiceCall) -> None:
-        """Send the X10 All Lights Off command."""
-        housecode = service.data.get(SRV_HOUSECODE)
-        await async_x10_all_lights_off(housecode)
-
-    async def async_srv_x10_all_lights_on(service: ServiceCall) -> None:
-        """Send the X10 All Lights On command."""
-        housecode = service.data.get(SRV_HOUSECODE)
-        await async_x10_all_lights_on(housecode)
-
-    async def async_srv_scene_on(service: ServiceCall) -> None:
-        """Trigger an INSTEON scene ON."""
-        group = service.data.get(SRV_ALL_LINK_GROUP)
-        await async_trigger_scene_on(group)
-
-    async def async_srv_scene_off(service: ServiceCall) -> None:
-        """Trigger an INSTEON scene ON."""
-        group = service.data.get(SRV_ALL_LINK_GROUP)
-        await async_trigger_scene_off(group)
-
-    @callback
-    def async_add_default_links(service: ServiceCall) -> None:
-        """Add the default All-Link entries to a device."""
-        entity_id = service.data[CONF_ENTITY_ID]
-        signal = f"{entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}"
-        async_dispatcher_send(hass, signal)
-
-    async def async_add_device_override(override):
-        """Remove an Insten device and associated entities."""
-        address = Address(override[CONF_ADDRESS])
-        await async_remove_ha_device(address)
-        devices.set_id(address, override[CONF_CAT], override[CONF_SUBCAT], 0)
-        await async_srv_save_devices()
-
-    async def async_remove_device_override(address):
-        """Remove an Insten device and associated entities."""
-        address = Address(address)
-        await async_remove_ha_device(address)
-        devices.set_id(address, None, None, None)
-        await devices.async_identify_device(address)
-        await async_srv_save_devices()
-
-    @callback
-    def async_add_x10_device(x10_config):
-        """Add X10 device."""
-        housecode = x10_config[CONF_HOUSECODE]
-        unitcode = x10_config[CONF_UNITCODE]
-        platform = x10_config[CONF_PLATFORM]
-        steps = x10_config.get(CONF_DIM_STEPS, 22)
-        x10_type = "on_off"
-        if platform == "light":
-            x10_type = "dimmable"
-        elif platform == "binary_sensor":
-            x10_type = "sensor"
-        _LOGGER.debug(
-            "Adding X10 device to Insteon: %s %d %s", housecode, unitcode, x10_type
-        )
-        # This must be run in the event loop
-        devices.add_x10_device(housecode, unitcode, x10_type, steps)
-
-    async def async_remove_x10_device(housecode, unitcode):
-        """Remove an X10 device and associated entities."""
-        address = create_x10_address(housecode, unitcode)
-        devices.pop(address)
-        await async_remove_ha_device(address)
-
-    async def async_remove_ha_device(address: Address, remove_all_refs: bool = False):
-        """Remove the device and all entities from hass."""
-        signal = f"{address.id}_{SIGNAL_REMOVE_ENTITY}"
-        async_dispatcher_send(hass, signal)
-        dev_registry = dr.async_get(hass)
-        device = dev_registry.async_get_device(identifiers={(DOMAIN, str(address))})
-        if device:
-            dev_registry.async_remove_device(device.id)
-
-    async def async_remove_insteon_device(
-        address: Address, remove_all_refs: bool = False
-    ):
-        """Remove the underlying Insteon device from the network."""
-        await devices.async_remove_device(
-            address=address, force=False, remove_all_refs=remove_all_refs
-        )
-        await async_srv_save_devices()
-
-    hass.services.async_register(
-        DOMAIN, SRV_ADD_ALL_LINK, async_srv_add_all_link, schema=ADD_ALL_LINK_SCHEMA
-    )
-    hass.services.async_register(
-        DOMAIN, SRV_DEL_ALL_LINK, async_srv_del_all_link, schema=DEL_ALL_LINK_SCHEMA
-    )
-    hass.services.async_register(
-        DOMAIN, SRV_LOAD_ALDB, async_srv_load_aldb, schema=LOAD_ALDB_SCHEMA
-    )
-    hass.services.async_register(
-        DOMAIN, SRV_PRINT_ALDB, print_aldb, schema=PRINT_ALDB_SCHEMA
-    )
-    hass.services.async_register(DOMAIN, SRV_PRINT_IM_ALDB, print_im_aldb, schema=None)
-    hass.services.async_register(
-        DOMAIN,
-        SRV_X10_ALL_UNITS_OFF,
-        async_srv_x10_all_units_off,
-        schema=X10_HOUSECODE_SCHEMA,
-    )
-    hass.services.async_register(
-        DOMAIN,
-        SRV_X10_ALL_LIGHTS_OFF,
-        async_srv_x10_all_lights_off,
-        schema=X10_HOUSECODE_SCHEMA,
-    )
-    hass.services.async_register(
-        DOMAIN,
-        SRV_X10_ALL_LIGHTS_ON,
-        async_srv_x10_all_lights_on,
-        schema=X10_HOUSECODE_SCHEMA,
-    )
-    hass.services.async_register(
-        DOMAIN, SRV_SCENE_ON, async_srv_scene_on, schema=TRIGGER_SCENE_SCHEMA
-    )
-    hass.services.async_register(
-        DOMAIN, SRV_SCENE_OFF, async_srv_scene_off, schema=TRIGGER_SCENE_SCHEMA
-    )
-
-    hass.services.async_register(
-        DOMAIN,
-        SRV_ADD_DEFAULT_LINKS,
-        async_add_default_links,
-        schema=ADD_DEFAULT_LINKS_SCHEMA,
-    )
-    async_dispatcher_connect(hass, SIGNAL_SAVE_DEVICES, async_srv_save_devices)
-    async_dispatcher_connect(
-        hass, SIGNAL_ADD_DEVICE_OVERRIDE, async_add_device_override
-    )
-    async_dispatcher_connect(
-        hass, SIGNAL_REMOVE_DEVICE_OVERRIDE, async_remove_device_override
-    )
-    async_dispatcher_connect(hass, SIGNAL_ADD_X10_DEVICE, async_add_x10_device)
-    async_dispatcher_connect(hass, SIGNAL_REMOVE_X10_DEVICE, async_remove_x10_device)
-    async_dispatcher_connect(hass, SIGNAL_REMOVE_HA_DEVICE, async_remove_ha_device)
-    async_dispatcher_connect(
-        hass, SIGNAL_REMOVE_INSTEON_DEVICE, async_remove_insteon_device
-    )
-    _LOGGER.debug("Insteon Services registered")
-
-
 def print_aldb_to_log(aldb):
     """Print the All-Link Database to the log file."""
     logger = logging.getLogger(f"{__name__}.links")
@@ -26,6 +26,7 @@ from homeassistant.helpers import (
     device_registry as dr,
 )
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
+from homeassistant.helpers.typing import ConfigType

 from .const import (
     _LOGGER,
@@ -46,7 +47,7 @@ from .const import (
 )
 from .helpers import _categorize_nodes, _categorize_programs
 from .models import IsyConfigEntry, IsyData
-from .services import async_setup_services, async_unload_services
+from .services import async_setup_services
 from .util import _async_cleanup_registry_entries

 CONFIG_SCHEMA = vol.Schema(
@@ -55,6 +56,14 @@ CONFIG_SCHEMA = vol.Schema(
 )


+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up the ISY 994 integration."""
+
+    async_setup_services(hass)
+
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: IsyConfigEntry) -> bool:
     """Set up the ISY 994 integration."""
     isy_config = entry.data
@@ -167,9 +176,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: IsyConfigEntry) -> bool:
         hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_stop_auto_update)
     )

-    # Register Integration-wide Services:
-    async_setup_services(hass)
-
     return True

@@ -221,9 +227,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: IsyConfigEntry) -> bool
     _LOGGER.debug("ISY Stopping Event Stream and automatic updates")
     entry.runtime_data.root.websocket.stop()

-    if not hass.config_entries.async_loaded_entries(DOMAIN):
-        async_unload_services(hass)
-
     return unload_ok

@@ -137,10 +137,6 @@ def async_get_entities(hass: HomeAssistant) -> dict[str, Entity]:
 @callback
 def async_setup_services(hass: HomeAssistant) -> None:
     """Create and register services for the ISY integration."""
-    existing_services = hass.services.async_services_for_domain(DOMAIN)
-    if existing_services and SERVICE_SEND_PROGRAM_COMMAND in existing_services:
-        # Integration-level services have already been added. Return.
-        return
-
     async def async_send_program_command_service_handler(service: ServiceCall) -> None:
         """Handle a send program command service call."""
@@ -230,18 +226,3 @@ def async_setup_services(hass: HomeAssistant) -> None:
         schema=cv.make_entity_service_schema(SERVICE_RENAME_NODE_SCHEMA),
         service_func=_async_rename_node,
     )
-
-
-@callback
-def async_unload_services(hass: HomeAssistant) -> None:
-    """Unload services for the ISY integration."""
-    existing_services = hass.services.async_services_for_domain(DOMAIN)
-    if not existing_services or SERVICE_SEND_PROGRAM_COMMAND not in existing_services:
-        return
-
-    _LOGGER.debug("Unloading ISY994 Services")
-    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_PROGRAM_COMMAND)
-    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_RAW_NODE_COMMAND)
-    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_NODE_COMMAND)
-    hass.services.async_remove(domain=DOMAIN, service=SERVICE_GET_ZWAVE_PARAMETER)
-    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SET_ZWAVE_PARAMETER)
@@ -30,7 +30,7 @@ from .const import (
     DOMAIN,
 )
 from .entity import JewishCalendarConfigEntry, JewishCalendarData
-from .service import async_setup_services
+from .services import async_setup_services

 _LOGGER = logging.getLogger(__name__)
 PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR]
@@ -99,7 +99,7 @@ rules:
     status: exempt
     comment: |
      Since all entities are configured manually, names are user-defined.
-  exception-translations: todo
+  exception-translations: done
   icon-translations: done
   reconfiguration-flow: todo
   repair-issues: todo
@@ -87,7 +87,9 @@ def get_knx_module(hass: HomeAssistant) -> KNXModule:
     try:
         return hass.data[KNX_MODULE_KEY]
     except KeyError as err:
-        raise HomeAssistantError("KNX entry not loaded") from err
+        raise HomeAssistantError(
+            translation_domain=DOMAIN, translation_key="integration_not_loaded"
+        ) from err


 SERVICE_KNX_EVENT_REGISTER_SCHEMA = vol.Schema(
@@ -166,7 +168,11 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
             removed_exposure = knx_module.service_exposures.pop(group_address)
         except KeyError as err:
             raise ServiceValidationError(
-                f"Could not find exposure for '{group_address}' to remove."
+                translation_domain=DOMAIN,
+                translation_key="service_exposure_remove_not_found",
+                translation_placeholders={
+                    "group_address": group_address,
+                },
             ) from err

         removed_exposure.async_remove()
@@ -234,13 +240,17 @@ async def service_send_to_knx_bus(call: ServiceCall) -> None:
         transcoder = DPTBase.parse_transcoder(attr_type)
         if transcoder is None:
             raise ServiceValidationError(
-                f"Invalid type for knx.send service: {attr_type}"
+                translation_domain=DOMAIN,
+                translation_key="service_send_invalid_type",
+                translation_placeholders={"type": attr_type},
             )
         try:
             payload = transcoder.to_knx(attr_payload)
         except ConversionError as err:
             raise ServiceValidationError(
-                f"Invalid payload for knx.send service: {err}"
+                translation_domain=DOMAIN,
+                translation_key="service_send_invalid_payload",
+                translation_placeholders={"error": str(err)},
             ) from err
     elif isinstance(attr_payload, int):
         payload = DPTBinary(attr_payload)
@@ -143,6 +143,20 @@
         "unsupported_tunnel_type": "Selected tunneling type not supported by gateway."
       }
     },
+  "exceptions": {
+    "integration_not_loaded": {
+      "message": "KNX integration is not loaded."
+    },
+    "service_exposure_remove_not_found": {
+      "message": "Could not find exposure for `{group_address}` to remove."
+    },
+    "service_send_invalid_payload": {
+      "message": "Invalid payload for `knx.send` service. {error}"
+    },
+    "service_send_invalid_type": {
+      "message": "Invalid type for `knx.send` service: {type}"
+    }
+  },
   "options": {
     "step": {
       "init": {
@@ -12,5 +12,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["pyatmo"],
-  "requirements": ["pyatmo==9.2.0"]
+  "requirements": ["pyatmo==9.2.1"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nextbus",
   "iot_class": "cloud_polling",
   "loggers": ["py_nextbus"],
-  "requirements": ["py-nextbusnext==2.1.2"]
+  "requirements": ["py-nextbusnext==2.2.0"]
 }
@@ -7,8 +7,6 @@ from pyrail.models import StationDetails
 import voluptuous as vol

 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
-from homeassistant.const import Platform
-from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.selector import (
     BooleanSelector,
@@ -22,7 +20,6 @@ from .const import (
     CONF_EXCLUDE_VIAS,
     CONF_SHOW_ON_MAP,
     CONF_STATION_FROM,
-    CONF_STATION_LIVE,
     CONF_STATION_TO,
     DOMAIN,
 )
@@ -115,68 +112,6 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN):
             errors=errors,
         )

-    async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult:
-        """Import configuration from yaml."""
-        try:
-            self.stations = await self._fetch_stations()
-        except CannotConnect:
-            return self.async_abort(reason="api_unavailable")
-
-        station_from = None
-        station_to = None
-        station_live = None
-        for station in self.stations:
-            if user_input[CONF_STATION_FROM] in (
-                station.standard_name,
-                station.name,
-            ):
-                station_from = station
-            if user_input[CONF_STATION_TO] in (
-                station.standard_name,
-                station.name,
-            ):
-                station_to = station
-            if CONF_STATION_LIVE in user_input and user_input[CONF_STATION_LIVE] in (
-                station.standard_name,
-                station.name,
-            ):
-                station_live = station
-
-        if station_from is None or station_to is None:
-            return self.async_abort(reason="invalid_station")
-        if station_from == station_to:
-            return self.async_abort(reason="same_station")
-
-        # config flow uses id and not the standard name
-        user_input[CONF_STATION_FROM] = station_from.id
-        user_input[CONF_STATION_TO] = station_to.id
-
-        if station_live:
-            user_input[CONF_STATION_LIVE] = station_live.id
-            entity_registry = er.async_get(self.hass)
-            prefix = "live"
-            vias = "_excl_vias" if user_input.get(CONF_EXCLUDE_VIAS, False) else ""
-            if entity_id := entity_registry.async_get_entity_id(
-                Platform.SENSOR,
-                DOMAIN,
-                f"{prefix}_{station_live.standard_name}_{station_from.standard_name}_{station_to.standard_name}",
-            ):
-                new_unique_id = f"{DOMAIN}_{prefix}_{station_live.id}_{station_from.id}_{station_to.id}{vias}"
-                entity_registry.async_update_entity(
-                    entity_id, new_unique_id=new_unique_id
-                )
-            if entity_id := entity_registry.async_get_entity_id(
-                Platform.SENSOR,
-                DOMAIN,
-                f"{prefix}_{station_live.name}_{station_from.name}_{station_to.name}",
-            ):
-                new_unique_id = f"{DOMAIN}_{prefix}_{station_live.id}_{station_from.id}_{station_to.id}{vias}"
-                entity_registry.async_update_entity(
-                    entity_id, new_unique_id=new_unique_id
-                )
-
-        return await self.async_step_user(user_input)
-
-
 class CannotConnect(Exception):
     """Error to indicate we cannot connect to NMBS."""
@@ -8,30 +8,19 @@ from typing import Any

 from pyrail import iRail
 from pyrail.models import ConnectionDetails, LiveboardDeparture, StationDetails
-import voluptuous as vol
-
-from homeassistant.components.sensor import (
-    PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
-    SensorEntity,
-)
-from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
+from homeassistant.components.sensor import SensorEntity
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_LATITUDE,
     ATTR_LONGITUDE,
     CONF_NAME,
-    CONF_PLATFORM,
     CONF_SHOW_ON_MAP,
     UnitOfTime,
 )
-from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
-from homeassistant.helpers import config_validation as cv
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.entity_platform import (
-    AddConfigEntryEntitiesCallback,
-    AddEntitiesCallback,
-)
-from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.util import dt as dt_util

 from .const import (  # noqa: F401
@@ -47,22 +36,9 @@ from .const import (  # noqa: F401

 _LOGGER = logging.getLogger(__name__)

-DEFAULT_NAME = "NMBS"
-
 DEFAULT_ICON = "mdi:train"
 DEFAULT_ICON_ALERT = "mdi:alert-octagon"

-PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
-    {
-        vol.Required(CONF_STATION_FROM): cv.string,
-        vol.Required(CONF_STATION_TO): cv.string,
-        vol.Optional(CONF_STATION_LIVE): cv.string,
-        vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
-        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
-        vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
-    }
-)
-

 def get_time_until(departure_time: datetime | None = None):
     """Calculate the time between now and a train's departure time."""
@@ -85,71 +61,6 @@ def get_ride_duration(departure_time: datetime, arrival_time: datetime, delay=0)
     return duration_time + get_delay_in_minutes(delay)


-async def async_setup_platform(
-    hass: HomeAssistant,
-    config: ConfigType,
-    async_add_entities: AddEntitiesCallback,
-    discovery_info: DiscoveryInfoType | None = None,
-) -> None:
-    """Set up the NMBS sensor with iRail API."""
-
-    if config[CONF_PLATFORM] == DOMAIN:
-        if CONF_SHOW_ON_MAP not in config:
-            config[CONF_SHOW_ON_MAP] = False
-        if CONF_EXCLUDE_VIAS not in config:
-            config[CONF_EXCLUDE_VIAS] = False
-
-        station_types = [CONF_STATION_FROM, CONF_STATION_TO, CONF_STATION_LIVE]
-
-        for station_type in station_types:
-            station = (
-                find_station_by_name(hass, config[station_type])
-                if station_type in config
-                else None
-            )
-            if station is None and station_type in config:
-                async_create_issue(
-                    hass,
-                    DOMAIN,
-                    "deprecated_yaml_import_issue_station_not_found",
-                    breaks_in_ha_version="2025.7.0",
-                    is_fixable=False,
-                    issue_domain=DOMAIN,
-                    severity=IssueSeverity.WARNING,
-                    translation_key="deprecated_yaml_import_issue_station_not_found",
-                    translation_placeholders={
-                        "domain": DOMAIN,
-                        "integration_title": "NMBS",
-                        "station_name": config[station_type],
-                        "url": "/config/integrations/dashboard/add?domain=nmbs",
-                    },
-                )
-                return
-
-        hass.async_create_task(
-            hass.config_entries.flow.async_init(
-                DOMAIN,
-                context={"source": SOURCE_IMPORT},
-                data=config,
-            )
-        )
-
-        async_create_issue(
-            hass,
-            HOMEASSISTANT_DOMAIN,
-            f"deprecated_yaml_{DOMAIN}",
-            breaks_in_ha_version="2025.7.0",
-            is_fixable=False,
-            issue_domain=DOMAIN,
-            severity=IssueSeverity.WARNING,
-            translation_key="deprecated_yaml",
-            translation_placeholders={
-                "domain": DOMAIN,
-                "integration_title": "NMBS",
-            },
-        )
-
-
 async def async_setup_entry(
     hass: HomeAssistant,
     config_entry: ConfigEntry,
@@ -336,7 +247,6 @@ class NMBSSensor(SensorEntity):

         delay = get_delay_in_minutes(self._attrs.departure.delay)
         departure = get_time_until(self._attrs.departure.time)
-        canceled = self._attrs.departure.canceled

         attrs = {
             "destination": self._attrs.departure.station,
@@ -346,14 +256,13 @@ class NMBSSensor(SensorEntity):
             "vehicle_id": self._attrs.departure.vehicle,
         }

-        if not canceled:
-            attrs["departure"] = f"In {departure} minutes"
-            attrs["departure_minutes"] = departure
-            attrs["canceled"] = False
-        else:
+        attrs["canceled"] = self._attrs.departure.canceled
+        if attrs["canceled"]:
             attrs["departure"] = None
             attrs["departure_minutes"] = None
-            attrs["canceled"] = True
+        else:
+            attrs["departure"] = f"In {departure} minutes"
+            attrs["departure_minutes"] = departure

         if self._show_on_map and self.station_coordinates:
             attrs[ATTR_LATITUDE] = self.station_coordinates[0]
@@ -369,9 +278,8 @@ class NMBSSensor(SensorEntity):
                     via.timebetween
                 ) + get_delay_in_minutes(via.departure.delay)

-        if delay > 0:
-            attrs["delay"] = f"{delay} minutes"
-            attrs["delay_minutes"] = delay
+        attrs["delay"] = f"{delay} minutes"
+        attrs["delay_minutes"] = delay

         return attrs

@@ -25,11 +25,5 @@
         }
       }
     }
-  },
-  "issues": {
-    "deprecated_yaml_import_issue_station_not_found": {
-      "title": "The {integration_title} YAML configuration import failed",
-      "description": "Configuring {integration_title} using YAML is being removed but there was a problem importing your YAML configuration.\n\nThe used station \"{station_name}\" could not be found. Fix it or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
-    }
   }
 }
@@ -1,30 +1,25 @@
 """The NZBGet integration."""

-import voluptuous as vol
-
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
-from homeassistant.core import HomeAssistant, ServiceCall
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.typing import ConfigType

-from .const import (
-    ATTR_SPEED,
-    DATA_COORDINATOR,
-    DATA_UNDO_UPDATE_LISTENER,
-    DEFAULT_SPEED_LIMIT,
-    DOMAIN,
-    SERVICE_PAUSE,
-    SERVICE_RESUME,
-    SERVICE_SET_SPEED,
-)
+from .const import DATA_COORDINATOR, DATA_UNDO_UPDATE_LISTENER, DOMAIN
 from .coordinator import NZBGetDataUpdateCoordinator
+from .services import async_register_services

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 PLATFORMS = [Platform.SENSOR, Platform.SWITCH]


-SPEED_LIMIT_SCHEMA = vol.Schema(
-    {vol.Optional(ATTR_SPEED, default=DEFAULT_SPEED_LIMIT): cv.positive_int}
-)
+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up NZBGet integration."""
+
+    async_register_services(hass)
+
+    return True


 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -44,8 +39,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

-    _async_register_services(hass, coordinator)
-
     return True

@@ -60,31 +53,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return unload_ok


-def _async_register_services(
-    hass: HomeAssistant,
-    coordinator: NZBGetDataUpdateCoordinator,
-) -> None:
-    """Register integration-level services."""
-
-    def pause(call: ServiceCall) -> None:
-        """Service call to pause downloads in NZBGet."""
-        coordinator.nzbget.pausedownload()
-
-    def resume(call: ServiceCall) -> None:
-        """Service call to resume downloads in NZBGet."""
-        coordinator.nzbget.resumedownload()
-
-    def set_speed(call: ServiceCall) -> None:
-        """Service call to rate limit speeds in NZBGet."""
-        coordinator.nzbget.rate(call.data[ATTR_SPEED])
-
-    hass.services.async_register(DOMAIN, SERVICE_PAUSE, pause, schema=vol.Schema({}))
-    hass.services.async_register(DOMAIN, SERVICE_RESUME, resume, schema=vol.Schema({}))
-    hass.services.async_register(
-        DOMAIN, SERVICE_SET_SPEED, set_speed, schema=SPEED_LIMIT_SCHEMA
-    )
-
-
 async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
     """Handle options update."""
     await hass.config_entries.async_reload(entry.entry_id)
58
homeassistant/components/nzbget/services.py
Normal file
@@ -0,0 +1,58 @@
+"""The NZBGet integration."""
+
+import voluptuous as vol
+
+from homeassistant.core import HomeAssistant, ServiceCall
+from homeassistant.exceptions import ServiceValidationError
+from homeassistant.helpers import config_validation as cv
+
+from .const import (
+    ATTR_SPEED,
+    DATA_COORDINATOR,
+    DEFAULT_SPEED_LIMIT,
+    DOMAIN,
+    SERVICE_PAUSE,
+    SERVICE_RESUME,
+    SERVICE_SET_SPEED,
+)
+from .coordinator import NZBGetDataUpdateCoordinator
+
+SPEED_LIMIT_SCHEMA = vol.Schema(
+    {vol.Optional(ATTR_SPEED, default=DEFAULT_SPEED_LIMIT): cv.positive_int}
+)
+
+
+def _get_coordinator(call: ServiceCall) -> NZBGetDataUpdateCoordinator:
+    """Service call to pause downloads in NZBGet."""
+    entries = call.hass.config_entries.async_loaded_entries(DOMAIN)
+    if not entries:
+        raise ServiceValidationError(
+            translation_domain=DOMAIN,
+            translation_key="invalid_config_entry",
+        )
+    return call.hass.data[DOMAIN][entries[0].entry_id][DATA_COORDINATOR]
+
+
+def pause(call: ServiceCall) -> None:
+    """Service call to pause downloads in NZBGet."""
+    _get_coordinator(call).nzbget.pausedownload()
+
+
+def resume(call: ServiceCall) -> None:
+    """Service call to resume downloads in NZBGet."""
+    _get_coordinator(call).nzbget.resumedownload()
+
+
+def set_speed(call: ServiceCall) -> None:
+    """Service call to rate limit speeds in NZBGet."""
+    _get_coordinator(call).nzbget.rate(call.data[ATTR_SPEED])
+
+
+def async_register_services(hass: HomeAssistant) -> None:
+    """Register integration-level services."""
+
+    hass.services.async_register(DOMAIN, SERVICE_PAUSE, pause, schema=vol.Schema({}))
+    hass.services.async_register(DOMAIN, SERVICE_RESUME, resume, schema=vol.Schema({}))
+    hass.services.async_register(
+        DOMAIN, SERVICE_SET_SPEED, set_speed, schema=SPEED_LIMIT_SCHEMA
+    )
@@ -64,6 +64,11 @@
       }
     }
   },
+  "exceptions": {
+    "invalid_config_entry": {
+      "message": "Config entry not found or not loaded!"
+    }
+  },
   "services": {
     "pause": {
       "name": "[%key:common::action::pause%]",
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ollama",
   "integration_type": "service",
   "iot_class": "local_polling",
-  "requirements": ["ollama==0.4.7"]
+  "requirements": ["ollama==0.5.1"]
 }
@@ -121,11 +121,10 @@ def async_register_services(hass: HomeAssistant) -> None:
             return {"files": [asdict(item_result) for item_result in upload_results]}
         return None

-    if not hass.services.has_service(DOMAIN, UPLOAD_SERVICE):
-        hass.services.async_register(
-            DOMAIN,
-            UPLOAD_SERVICE,
-            async_handle_upload,
-            schema=UPLOAD_SERVICE_SCHEMA,
-            supports_response=SupportsResponse.OPTIONAL,
-        )
+    hass.services.async_register(
+        DOMAIN,
+        UPLOAD_SERVICE,
+        async_handle_upload,
+        schema=UPLOAD_SERVICE_SCHEMA,
+        supports_response=SupportsResponse.OPTIONAL,
+    )
@@ -13,7 +13,7 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
-  "requirements": ["pyoverkiz==1.17.1"],
+  "requirements": ["pyoverkiz==1.17.2"],
   "zeroconf": [
     {
       "type": "_kizbox._tcp.local.",
@@ -5,14 +5,25 @@ from python_picnic_api2 import PicnicAPI
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY_CODE, Platform
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.typing import ConfigType

 from .const import CONF_API, CONF_COORDINATOR, DOMAIN
 from .coordinator import PicnicUpdateCoordinator
 from .services import async_register_services

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 PLATFORMS = [Platform.SENSOR, Platform.TODO]


+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up Picnic integration."""
+
+    await async_register_services(hass)
+
+    return True
+
+
 def create_picnic_client(entry: ConfigEntry):
     """Create an instance of the PicnicAPI client."""
     return PicnicAPI(
@@ -37,9 +48,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

-    # Register the services
-    await async_register_services(hass)
-
     return True

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/picnic",
   "iot_class": "cloud_polling",
   "loggers": ["python_picnic_api2"],
-  "requirements": ["python-picnic-api2==1.2.4"]
+  "requirements": ["python-picnic-api2==1.3.1"]
 }
@@ -29,9 +29,6 @@ class PicnicServiceException(Exception):
 async def async_register_services(hass: HomeAssistant) -> None:
     """Register services for the Picnic integration, if not registered yet."""

-    if hass.services.has_service(DOMAIN, SERVICE_ADD_PRODUCT_TO_CART):
-        return
-
     async def async_add_product_service(call: ServiceCall):
         api_client = await get_api_client(hass, call.data[ATTR_CONFIG_ENTRY_ID])
         await handle_add_product(hass, api_client, call)
@@ -13,5 +13,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aiokem"],
   "quality_scale": "silver",
-  "requirements": ["aiokem==0.5.12"]
+  "requirements": ["aiokem==1.0.1"]
 }
@@ -150,6 +150,10 @@ async def async_setup_entry(

         if host.api.new_devices and config_entry.state == ConfigEntryState.LOADED:
             # Their are new cameras/chimes connected, reload to add them.
+            _LOGGER.debug(
+                "Reloading Reolink %s to add new device (capabilities)",
+                host.api.nvr_name,
+            )
             hass.async_create_task(
                 hass.config_entries.async_reload(config_entry.entry_id)
             )
@@ -194,6 +194,13 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN):
             )
             raise AbortFlow("already_configured")

+        if existing_entry and existing_entry.data[CONF_HOST] != discovery_info.ip:
+            _LOGGER.debug(
+                "Reolink DHCP reported new IP '%s', updating from old IP '%s'",
+                discovery_info.ip,
+                existing_entry.data[CONF_HOST],
+            )
+
         self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})

         self.context["title_placeholders"] = {
@@ -19,5 +19,5 @@
   "iot_class": "local_push",
   "loggers": ["reolink_aio"],
   "quality_scale": "platinum",
-  "requirements": ["reolink-aio==0.13.4"]
+  "requirements": ["reolink-aio==0.13.5"]
 }
@@ -636,14 +636,21 @@ class SamsungTVWSBridge(
             )
             self._remote = None
         except ConnectionFailure as err:
-            LOGGER.warning(
-                (
-                    "Unexpected ConnectionFailure trying to get remote for %s, "
-                    "please report this issue: %s"
-                ),
-                self.host,
-                repr(err),
-            )
+            error_details = err.args[0]
+            if "ms.channel.timeOut" in (error_details := repr(err)):
+                # The websocket was connected, but the TV is probably asleep
+                LOGGER.debug(
+                    "Channel timeout occurred trying to get remote for %s: %s",
+                    self.host,
+                    error_details,
+                )
+            else:
+                LOGGER.warning(
+                    "Unexpected ConnectionFailure trying to get remote for %s, "
+                    "please report this issue: %s",
+                    self.host,
+                    error_details,
+                )
             self._remote = None
         except (WebSocketException, AsyncioTimeoutError, OSError) as err:
             LOGGER.debug("Failed to get remote for %s: %s", self.host, repr(err))
@@ -39,7 +39,7 @@ class SamsungTVDataUpdateCoordinator(DataUpdateCoordinator[None]):
         )

         self.bridge = bridge
-        self.is_on: bool | None = False
+        self.is_on: bool | None = None
         self.async_extra_update: Callable[[], Coroutine[Any, Any, None]] | None = None

     async def _async_update_data(self) -> None:
@@ -52,7 +52,12 @@ class SamsungTVDataUpdateCoordinator(DataUpdateCoordinator[None]):
         else:
             self.is_on = await self.bridge.async_is_on()
         if self.is_on != old_state:
-            LOGGER.debug("TV %s state updated to %s", self.bridge.host, self.is_on)
+            LOGGER.debug(
+                "TV %s state updated from %s to %s",
+                self.bridge.host,
+                old_state,
+                self.is_on,
+            )

         if self.async_extra_update:
             await self.async_extra_update()
@@ -7,7 +7,7 @@ from dataclasses import dataclass
 from functools import partial
 from typing import TYPE_CHECKING, Any, Final

-from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY, RPC_GENERATIONS
+from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY_G3, RPC_GENERATIONS
 from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError

 from homeassistant.components.button import (
@@ -62,7 +62,7 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
         translation_key="self_test",
         entity_category=EntityCategory.DIAGNOSTIC,
         press_action="trigger_shelly_gas_self_test",
-        supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS,
+        supported=lambda coordinator: coordinator.model in SHELLY_GAS_MODELS,
     ),
     ShellyButtonDescription[ShellyBlockCoordinator](
         key="mute",
@@ -70,7 +70,7 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
         translation_key="mute",
         entity_category=EntityCategory.CONFIG,
         press_action="trigger_shelly_gas_mute",
-        supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS,
+        supported=lambda coordinator: coordinator.model in SHELLY_GAS_MODELS,
     ),
     ShellyButtonDescription[ShellyBlockCoordinator](
         key="unmute",
@@ -78,7 +78,7 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
         translation_key="unmute",
         entity_category=EntityCategory.CONFIG,
         press_action="trigger_shelly_gas_unmute",
-        supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS,
+        supported=lambda coordinator: coordinator.model in SHELLY_GAS_MODELS,
     ),
 ]

@@ -89,7 +89,7 @@ BLU_TRV_BUTTONS: Final[list[ShellyButtonDescription]] = [
         translation_key="calibrate",
         entity_category=EntityCategory.CONFIG,
         press_action="trigger_blu_trv_calibration",
-        supported=lambda coordinator: coordinator.device.model == MODEL_BLU_GATEWAY,
+        supported=lambda coordinator: coordinator.model == MODEL_BLU_GATEWAY_G3,
     ),
 ]

@@ -160,6 +160,7 @@ async def async_setup_entry(
             ShellyBluTrvButton(coordinator, button, id_)
             for id_ in blutrv_key_ids
             for button in BLU_TRV_BUTTONS
+            if button.supported(coordinator)
         )

     async_add_entities(entities)
@@ -218,5 +218,6 @@ class SmaConfigFlow(ConfigFlow, domain=DOMAIN):
                     vol.Required(CONF_PASSWORD): cv.string,
                 }
             ),
+            description_placeholders={CONF_HOST: self._data[CONF_HOST]},
             errors=errors,
         )
@@ -32,6 +32,16 @@
         },
         "description": "Enter your SMA device information.",
         "title": "Set up SMA Solar"
+      },
+      "discovery_confirm": {
+        "title": "[%key:component::sma::config::step::user::title]",
+        "description": "Do you want to setup the discovered SMA ({host})?",
+        "data": {
+          "group": "[%key:component::sma::config::step::user::data::group]",
+          "password": "[%key:common::config_flow::data::password%]",
+          "ssl": "[%key:common::config_flow::data::ssl%]",
+          "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
+        }
       }
     }
   }
@@ -12,7 +12,7 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "quality_scale": "silver",
-  "requirements": ["pysmlight==0.2.5"],
+  "requirements": ["pysmlight==0.2.6"],
   "zeroconf": [
     {
       "type": "_slzb-06._tcp.local."
@@ -130,11 +130,11 @@ async def async_generate_speaker_info(
         value = getattr(speaker, attrib)
         payload[attrib] = get_contents(value)

-    payload["enabled_entities"] = {
+    payload["enabled_entities"] = sorted(
         entity_id
         for entity_id, s in hass.data[DATA_SONOS].entity_id_mappings.items()
         if s is speaker
-    }
+    )
     payload["media"] = await async_generate_media_info(hass, speaker)
     payload["activity_stats"] = speaker.activity_stats.report()
     payload["event_stats"] = speaker.event_stats.report()
@@ -367,7 +367,9 @@ class SwitchbotOptionsFlowHandler(OptionsFlow):
                 ),
             ): int
         }
-        if self.config_entry.data.get(CONF_SENSOR_TYPE) == SupportedModels.LOCK_PRO:
+        if self.config_entry.data.get(CONF_SENSOR_TYPE, "").startswith(
+            SupportedModels.LOCK
+        ):
             options.update(
                 {
                     vol.Optional(
@@ -12,7 +12,8 @@ from synology_dsm.exceptions import SynologyDSMNotLoggedInException
|
|||||||
from homeassistant.const import CONF_MAC, CONF_SCAN_INTERVAL, CONF_VERIFY_SSL
|
from homeassistant.const import CONF_MAC, CONF_SCAN_INTERVAL, CONF_VERIFY_SSL
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.exceptions import ConfigEntryNotReady
|
from homeassistant.exceptions import ConfigEntryNotReady
|
||||||
from homeassistant.helpers import device_registry as dr
|
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||||
|
from homeassistant.helpers.typing import ConfigType
|
||||||
|
|
||||||
from .common import SynoApi, raise_config_entry_auth_error
|
from .common import SynoApi, raise_config_entry_auth_error
|
||||||
from .const import (
|
from .const import (
|
||||||
@@ -34,10 +35,20 @@ from .coordinator import (
|
|||||||
SynologyDSMData,
|
SynologyDSMData,
|
||||||
SynologyDSMSwitchUpdateCoordinator,
|
SynologyDSMSwitchUpdateCoordinator,
|
||||||
)
|
)
|
||||||
from .service import async_setup_services
|
from .services import async_setup_services
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||||
|
|
||||||
|
|
||||||
|
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||||
|
"""Set up the Synology DSM component."""
|
||||||
|
|
||||||
|
await async_setup_services(hass)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_entry(hass: HomeAssistant, entry: SynologyDSMConfigEntry) -> bool:
|
async def async_setup_entry(hass: HomeAssistant, entry: SynologyDSMConfigEntry) -> bool:
|
||||||
"""Set up Synology DSM sensors."""
|
"""Set up Synology DSM sensors."""
|
||||||
@@ -89,9 +100,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: SynologyDSMConfigEntry)
|
|||||||
details = EXCEPTION_UNKNOWN
|
details = EXCEPTION_UNKNOWN
|
||||||
raise ConfigEntryNotReady(details) from err
|
raise ConfigEntryNotReady(details) from err
|
||||||
|
|
||||||
# Services
|
|
||||||
await async_setup_services(hass)
|
|
||||||
|
|
||||||
# For SSDP compat
|
# For SSDP compat
|
||||||
if not entry.data.get(CONF_MAC):
|
if not entry.data.get(CONF_MAC):
|
||||||
hass.config_entries.async_update_entry(
|
hass.config_entries.async_update_entry(
|
||||||
|
@@ -205,7 +205,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetryVehicleSensorEntityDescription, ...] = (
|
|||||||
key="charge_state_charging_state",
|
key="charge_state_charging_state",
|
||||||
polling=True,
|
polling=True,
|
||||||
streaming_listener=lambda vehicle, callback: vehicle.listen_DetailedChargeState(
|
streaming_listener=lambda vehicle, callback: vehicle.listen_DetailedChargeState(
|
||||||
lambda value: None if value is None else callback(value.lower())
|
lambda value: callback(None if value is None else CHARGE_STATES.get(value))
|
||||||
),
|
),
|
||||||
polling_value_fn=lambda value: CHARGE_STATES.get(str(value)),
|
polling_value_fn=lambda value: CHARGE_STATES.get(str(value)),
|
||||||
options=list(CHARGE_STATES.values()),
|
options=list(CHARGE_STATES.values()),
|
||||||
@@ -533,7 +533,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetryVehicleSensorEntityDescription, ...] = (
|
|||||||
TeslemetryVehicleSensorEntityDescription(
|
TeslemetryVehicleSensorEntityDescription(
|
||||||
key="bms_state",
|
key="bms_state",
|
||||||
streaming_listener=lambda vehicle, callback: vehicle.listen_BMSState(
|
streaming_listener=lambda vehicle, callback: vehicle.listen_BMSState(
|
||||||
lambda value: None if value is None else callback(BMS_STATES.get(value))
|
lambda value: callback(None if value is None else BMS_STATES.get(value))
|
||||||
),
|
),
|
||||||
device_class=SensorDeviceClass.ENUM,
|
device_class=SensorDeviceClass.ENUM,
|
||||||
options=list(BMS_STATES.values()),
|
options=list(BMS_STATES.values()),
|
||||||
|
@@ -112,15 +112,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||||||
SERVICE_TURN_OFF, None, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF]
|
SERVICE_TURN_OFF, None, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF]
|
||||||
)
|
)
|
||||||
component.async_register_entity_service(
|
component.async_register_entity_service(
|
||||||
SERVICE_SET_AWAY_MODE, SET_AWAY_MODE_SCHEMA, async_service_away_mode
|
SERVICE_SET_AWAY_MODE,
|
||||||
|
SET_AWAY_MODE_SCHEMA,
|
||||||
|
async_service_away_mode,
|
||||||
|
[WaterHeaterEntityFeature.AWAY_MODE],
|
||||||
)
|
)
|
||||||
component.async_register_entity_service(
|
component.async_register_entity_service(
|
||||||
SERVICE_SET_TEMPERATURE, SET_TEMPERATURE_SCHEMA, async_service_temperature_set
|
SERVICE_SET_TEMPERATURE,
|
||||||
|
SET_TEMPERATURE_SCHEMA,
|
||||||
|
async_service_temperature_set,
|
||||||
|
[WaterHeaterEntityFeature.TARGET_TEMPERATURE],
|
||||||
)
|
)
|
||||||
component.async_register_entity_service(
|
component.async_register_entity_service(
|
||||||
SERVICE_SET_OPERATION_MODE,
|
SERVICE_SET_OPERATION_MODE,
|
||||||
SET_OPERATION_MODE_SCHEMA,
|
SET_OPERATION_MODE_SCHEMA,
|
||||||
"async_handle_set_operation_mode",
|
"async_handle_set_operation_mode",
|
||||||
|
[WaterHeaterEntityFeature.OPERATION_MODE],
|
||||||
)
|
)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
@@ -6,7 +6,7 @@ aiodns==3.4.0
|
|||||||
aiohasupervisor==0.3.1
|
aiohasupervisor==0.3.1
|
||||||
aiohttp-asyncmdnsresolver==0.1.1
|
aiohttp-asyncmdnsresolver==0.1.1
|
||||||
aiohttp-fast-zlib==0.2.3
|
aiohttp-fast-zlib==0.2.3
|
||||||
aiohttp==3.12.6
|
aiohttp==3.12.7
|
||||||
aiohttp_cors==0.8.1
|
aiohttp_cors==0.8.1
|
||||||
aiousbwatcher==1.1.1
|
aiousbwatcher==1.1.1
|
||||||
aiozoneinfo==0.2.3
|
aiozoneinfo==0.2.3
|
||||||
@@ -32,7 +32,7 @@ cronsim==2.6
|
|||||||
cryptography==45.0.3
|
cryptography==45.0.3
|
||||||
dbus-fast==2.43.0
|
dbus-fast==2.43.0
|
||||||
fnv-hash-fast==1.5.0
|
fnv-hash-fast==1.5.0
|
||||||
go2rtc-client==0.1.3b0
|
go2rtc-client==0.2.1
|
||||||
ha-ffmpeg==3.2.2
|
ha-ffmpeg==3.2.2
|
||||||
habluetooth==3.48.2
|
habluetooth==3.48.2
|
||||||
hass-nabucasa==0.101.0
|
hass-nabucasa==0.101.0
|
||||||
@@ -66,7 +66,7 @@ securetar==2025.2.1
|
|||||||
SQLAlchemy==2.0.41
|
SQLAlchemy==2.0.41
|
||||||
standard-aifc==3.13.0
|
standard-aifc==3.13.0
|
||||||
standard-telnetlib==3.13.0
|
standard-telnetlib==3.13.0
|
||||||
typing-extensions>=4.13.0,<5.0
|
typing-extensions>=4.14.0,<5.0
|
||||||
ulid-transform==1.4.0
|
ulid-transform==1.4.0
|
||||||
urllib3>=1.26.5,<2
|
urllib3>=1.26.5,<2
|
||||||
uv==0.7.1
|
uv==0.7.1
|
||||||
@@ -88,9 +88,9 @@ httplib2>=0.19.0
|
|||||||
# gRPC is an implicit dependency that we want to make explicit so we manage
|
# gRPC is an implicit dependency that we want to make explicit so we manage
|
||||||
# upgrades intentionally. It is a large package to build from source and we
|
# upgrades intentionally. It is a large package to build from source and we
|
||||||
# want to ensure we have wheels built.
|
# want to ensure we have wheels built.
|
||||||
grpcio==1.72.0
|
grpcio==1.72.1
|
||||||
grpcio-status==1.72.0
|
grpcio-status==1.72.1
|
||||||
grpcio-reflection==1.72.0
|
grpcio-reflection==1.72.1
|
||||||
|
|
||||||
# This is a old unmaintained library and is replaced with pycryptodome
|
# This is a old unmaintained library and is replaced with pycryptodome
|
||||||
pycrypto==1000000000.0.0
|
pycrypto==1000000000.0.0
|
||||||
|
@@ -28,7 +28,7 @@ dependencies = [
|
|||||||
# change behavior based on presence of supervisor. Deprecated with #127228
|
# change behavior based on presence of supervisor. Deprecated with #127228
|
||||||
# Lib can be removed with 2025.11
|
# Lib can be removed with 2025.11
|
||||||
"aiohasupervisor==0.3.1",
|
"aiohasupervisor==0.3.1",
|
||||||
"aiohttp==3.12.6",
|
"aiohttp==3.12.7",
|
||||||
"aiohttp_cors==0.8.1",
|
"aiohttp_cors==0.8.1",
|
||||||
"aiohttp-fast-zlib==0.2.3",
|
"aiohttp-fast-zlib==0.2.3",
|
||||||
"aiohttp-asyncmdnsresolver==0.1.1",
|
"aiohttp-asyncmdnsresolver==0.1.1",
|
||||||
@@ -111,7 +111,7 @@ dependencies = [
|
|||||||
"SQLAlchemy==2.0.41",
|
"SQLAlchemy==2.0.41",
|
||||||
"standard-aifc==3.13.0",
|
"standard-aifc==3.13.0",
|
||||||
"standard-telnetlib==3.13.0",
|
"standard-telnetlib==3.13.0",
|
||||||
"typing-extensions>=4.13.0,<5.0",
|
"typing-extensions>=4.14.0,<5.0",
|
||||||
"ulid-transform==1.4.0",
|
"ulid-transform==1.4.0",
|
||||||
# Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503
|
# Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503
|
||||||
# Temporary setting an upper bound, to prevent compat issues with urllib3>=2
|
# Temporary setting an upper bound, to prevent compat issues with urllib3>=2
|
||||||
@@ -531,18 +531,20 @@ filterwarnings = [
|
|||||||
# https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0
|
# https://github.com/DataDog/datadogpy/pull/290 - >=0.23.0
|
||||||
"ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base",
|
"ignore:invalid escape sequence:SyntaxWarning:.*datadog.dogstatsd.base",
|
||||||
# https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0
|
# https://github.com/DataDog/datadogpy/pull/566/files - >=0.37.0
|
||||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:datadog.util.compat",
|
"ignore:pkg_resources is deprecated as an API:UserWarning:datadog.util.compat",
|
||||||
# https://github.com/httplib2/httplib2/pull/226 - >=0.21.0
|
# https://github.com/httplib2/httplib2/pull/226 - >=0.21.0
|
||||||
"ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2",
|
"ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2",
|
||||||
# https://github.com/influxdata/influxdb-client-python/issues/603 >=1.45.0
|
|
||||||
# https://github.com/influxdata/influxdb-client-python/pull/652
|
|
||||||
"ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point",
|
|
||||||
# https://github.com/majuss/lupupy/pull/15 - >0.3.2
|
# https://github.com/majuss/lupupy/pull/15 - >0.3.2
|
||||||
"ignore:\"is not\" with 'str' literal. Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm",
|
"ignore:\"is not\" with 'str' literal. Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm",
|
||||||
# https://github.com/nextcord/nextcord/pull/1095 - >=3.0.0
|
# https://github.com/nextcord/nextcord/pull/1095 - >=3.0.0
|
||||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:nextcord.health_check",
|
"ignore:pkg_resources is deprecated as an API:UserWarning:nextcord.health_check",
|
||||||
# https://github.com/vacanza/python-holidays/discussions/1800 - >1.0.0
|
# https://github.com/vacanza/python-holidays/discussions/1800 - >1.0.0
|
||||||
"ignore::DeprecationWarning:holidays",
|
"ignore::DeprecationWarning:holidays",
|
||||||
|
# https://github.com/ReactiveX/RxPY/pull/716 - >4.0.4
|
||||||
|
"ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:reactivex.internal.constants",
|
||||||
|
# https://github.com/postlund/pyatv/issues/2645 - >0.16.0
|
||||||
|
# https://github.com/postlund/pyatv/pull/2664
|
||||||
|
"ignore:Protobuf gencode .* exactly one major version older than the runtime version 6.* at pyatv:UserWarning:google.protobuf.runtime_version",
|
||||||
# https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0
|
# https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0
|
||||||
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol",
|
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol",
|
||||||
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol",
|
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol",
|
||||||
@@ -550,6 +552,8 @@ filterwarnings = [
|
|||||||
"ignore:functools.partial will be a method descriptor in future Python versions; wrap it in enum.member\\(\\) if you want to preserve the old behavior:FutureWarning:miio.miot_device",
|
"ignore:functools.partial will be a method descriptor in future Python versions; wrap it in enum.member\\(\\) if you want to preserve the old behavior:FutureWarning:miio.miot_device",
|
||||||
# https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0
|
# https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0
|
||||||
"ignore:invalid escape sequence:SyntaxWarning:.*stringcase",
|
"ignore:invalid escape sequence:SyntaxWarning:.*stringcase",
|
||||||
|
# https://github.com/xchwarze/samsung-tv-ws-api/pull/151 - >2.7.2 - 2024-12-06 # wrong stacklevel in aiohttp
|
||||||
|
"ignore:verify_ssl is deprecated, use ssl=False instead:DeprecationWarning:aiohttp.client",
|
||||||
|
|
||||||
# -- other
|
# -- other
|
||||||
# Locale changes might take some time to resolve upstream
|
# Locale changes might take some time to resolve upstream
|
||||||
@@ -581,6 +585,8 @@ filterwarnings = [
|
|||||||
"ignore:getReadersFromUrls is deprecated. Please use get_readers_from_urls instead:DeprecationWarning:pysnmp.smi.compiler",
|
"ignore:getReadersFromUrls is deprecated. Please use get_readers_from_urls instead:DeprecationWarning:pysnmp.smi.compiler",
|
||||||
# https://github.com/Python-roborock/python-roborock/issues/305 - 2.18.0 - 2025-04-06
|
# https://github.com/Python-roborock/python-roborock/issues/305 - 2.18.0 - 2025-04-06
|
||||||
"ignore:Callback API version 1 is deprecated, update to latest version:DeprecationWarning:roborock.cloud_api",
|
"ignore:Callback API version 1 is deprecated, update to latest version:DeprecationWarning:roborock.cloud_api",
|
||||||
|
# https://github.com/Teslemetry/python-tesla-fleet-api - v1.1.1 - 2025-05-29
|
||||||
|
"ignore:Protobuf gencode .* exactly one major version older than the runtime version 6.* at (car_server|common|errors|keys|managed_charging|signatures|universal_message|vcsec|vehicle):UserWarning:google.protobuf.runtime_version",
|
||||||
# https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10
|
# https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10
|
||||||
"ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const",
|
"ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const",
|
||||||
# New in aiohttp - v3.9.0
|
# New in aiohttp - v3.9.0
|
||||||
@@ -603,14 +609,12 @@ filterwarnings = [
|
|||||||
# https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18
|
# https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18
|
||||||
"ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty
|
"ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty
|
||||||
# - pkg_resources
|
# - pkg_resources
|
||||||
# https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20
|
|
||||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast",
|
|
||||||
# https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17
|
# https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17
|
||||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data",
|
"ignore:pkg_resources is deprecated as an API:UserWarning:pysiaalarm.data.data",
|
||||||
# https://pypi.org/project/pybotvac/ - v0.0.26 - 2025-02-26
|
# https://pypi.org/project/pybotvac/ - v0.0.26 - 2025-02-26
|
||||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version",
|
"ignore:pkg_resources is deprecated as an API:UserWarning:pybotvac.version",
|
||||||
# https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21
|
# https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21
|
||||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom",
|
"ignore:pkg_resources is deprecated as an API:UserWarning:pymystrom",
|
||||||
|
|
||||||
# -- New in Python 3.13
|
# -- New in Python 3.13
|
||||||
# https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11
|
# https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11
|
||||||
@@ -641,8 +645,6 @@ filterwarnings = [
|
|||||||
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models",
|
"ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models",
|
||||||
# https://pypi.org/project/enocean/ - v0.50.1 (installed) -> v0.60.1 - 2021-06-18
|
# https://pypi.org/project/enocean/ - v0.50.1 (installed) -> v0.60.1 - 2021-06-18
|
||||||
"ignore:It looks like you're using an HTML parser to parse an XML document:UserWarning:enocean.protocol.eep",
|
"ignore:It looks like you're using an HTML parser to parse an XML document:UserWarning:enocean.protocol.eep",
|
||||||
# https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28
|
|
||||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig",
|
|
||||||
# https://pypi.org/project/influxdb/ - v5.3.2 - 2024-04-18 (archived)
|
# https://pypi.org/project/influxdb/ - v5.3.2 - 2024-04-18 (archived)
|
||||||
"ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb.line_protocol",
|
"ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb.line_protocol",
|
||||||
# https://pypi.org/project/lark-parser/ - v0.12.0 - 2021-08-30 -> moved to `lark`
|
# https://pypi.org/project/lark-parser/ - v0.12.0 - 2021-08-30 -> moved to `lark`
|
||||||
@@ -658,7 +660,7 @@ filterwarnings = [
|
|||||||
# https://pypi.org/project/opuslib/ - v3.0.1 - 2018-01-16
|
# https://pypi.org/project/opuslib/ - v3.0.1 - 2018-01-16
|
||||||
"ignore:\"is not\" with 'int' literal. Did you mean \"!=\"?:SyntaxWarning:.*opuslib.api.decoder",
|
"ignore:\"is not\" with 'int' literal. Did you mean \"!=\"?:SyntaxWarning:.*opuslib.api.decoder",
|
||||||
# https://pypi.org/project/pilight/ - v0.1.1 - 2016-10-19
|
# https://pypi.org/project/pilight/ - v0.1.1 - 2016-10-19
|
||||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:pilight",
|
"ignore:pkg_resources is deprecated as an API:UserWarning:pilight",
|
||||||
# https://pypi.org/project/plumlightpad/ - v0.0.11 - 2018-10-16
|
# https://pypi.org/project/plumlightpad/ - v0.0.11 - 2018-10-16
|
||||||
"ignore:invalid escape sequence:SyntaxWarning:.*plumlightpad.plumdiscovery",
|
"ignore:invalid escape sequence:SyntaxWarning:.*plumlightpad.plumdiscovery",
|
||||||
"ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*plumlightpad.(lightpad|logicalload)",
|
"ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*plumlightpad.(lightpad|logicalload)",
|
||||||
@@ -673,8 +675,6 @@ filterwarnings = [
|
|||||||
# https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19
|
# https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19
|
||||||
"ignore:client.loop property is deprecated:DeprecationWarning:pyqwikswitch.async_",
|
"ignore:client.loop property is deprecated:DeprecationWarning:pyqwikswitch.async_",
|
||||||
"ignore:with timeout\\(\\) is deprecated:DeprecationWarning:pyqwikswitch.async_",
|
"ignore:with timeout\\(\\) is deprecated:DeprecationWarning:pyqwikswitch.async_",
|
||||||
# https://pypi.org/project/Rx/ - v3.2.0 - 2021-04-25
|
|
||||||
"ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:rx.internal.constants",
|
|
||||||
# https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10
|
# https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10
|
||||||
"ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp",
|
"ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp",
|
||||||
]
|
]
|
||||||
|
4
requirements.txt
generated
4
requirements.txt
generated
@@ -5,7 +5,7 @@
|
|||||||
# Home Assistant Core
|
# Home Assistant Core
|
||||||
aiodns==3.4.0
|
aiodns==3.4.0
|
||||||
aiohasupervisor==0.3.1
|
aiohasupervisor==0.3.1
|
||||||
aiohttp==3.12.6
|
aiohttp==3.12.7
|
||||||
aiohttp_cors==0.8.1
|
aiohttp_cors==0.8.1
|
||||||
aiohttp-fast-zlib==0.2.3
|
aiohttp-fast-zlib==0.2.3
|
||||||
aiohttp-asyncmdnsresolver==0.1.1
|
aiohttp-asyncmdnsresolver==0.1.1
|
||||||
@@ -51,7 +51,7 @@ securetar==2025.2.1
|
|||||||
SQLAlchemy==2.0.41
|
SQLAlchemy==2.0.41
|
||||||
standard-aifc==3.13.0
|
standard-aifc==3.13.0
|
||||||
standard-telnetlib==3.13.0
|
standard-telnetlib==3.13.0
|
||||||
typing-extensions>=4.13.0,<5.0
|
typing-extensions>=4.14.0,<5.0
|
||||||
ulid-transform==1.4.0
|
ulid-transform==1.4.0
|
||||||
urllib3>=1.26.5,<2
|
urllib3>=1.26.5,<2
|
||||||
uv==0.7.1
|
uv==0.7.1
|
||||||
|
26
requirements_all.txt
generated
26
requirements_all.txt
generated
@@ -182,7 +182,7 @@ aioairzone-cloud==0.6.12
|
|||||||
aioairzone==1.0.0
|
aioairzone==1.0.0
|
||||||
|
|
||||||
# homeassistant.components.amazon_devices
|
# homeassistant.components.amazon_devices
|
||||||
aioamazondevices==3.0.4
|
aioamazondevices==3.0.5
|
||||||
|
|
||||||
# homeassistant.components.ambient_network
|
# homeassistant.components.ambient_network
|
||||||
# homeassistant.components.ambient_station
|
# homeassistant.components.ambient_station
|
||||||
@@ -268,7 +268,7 @@ aiohasupervisor==0.3.1
|
|||||||
aiohomeconnect==0.17.1
|
aiohomeconnect==0.17.1
|
||||||
|
|
||||||
# homeassistant.components.homekit_controller
|
# homeassistant.components.homekit_controller
|
||||||
aiohomekit==3.2.14
|
aiohomekit==3.2.15
|
||||||
|
|
||||||
# homeassistant.components.mcp_server
|
# homeassistant.components.mcp_server
|
||||||
aiohttp_sse==2.2.0
|
aiohttp_sse==2.2.0
|
||||||
@@ -289,7 +289,7 @@ aiokafka==0.10.0
|
|||||||
aiokef==0.2.16
|
aiokef==0.2.16
|
||||||
|
|
||||||
# homeassistant.components.rehlko
|
# homeassistant.components.rehlko
|
||||||
aiokem==0.5.12
|
aiokem==1.0.1
|
||||||
|
|
||||||
# homeassistant.components.lifx
|
# homeassistant.components.lifx
|
||||||
aiolifx-effects==0.3.2
|
aiolifx-effects==0.3.2
|
||||||
@@ -610,7 +610,7 @@ bizkaibus==0.1.1
|
|||||||
|
|
||||||
# homeassistant.components.eq3btsmart
|
# homeassistant.components.eq3btsmart
|
||||||
# homeassistant.components.esphome
|
# homeassistant.components.esphome
|
||||||
bleak-esphome==2.15.1
|
bleak-esphome==2.16.0
|
||||||
|
|
||||||
# homeassistant.components.bluetooth
|
# homeassistant.components.bluetooth
|
||||||
bleak-retry-connector==3.9.0
|
bleak-retry-connector==3.9.0
|
||||||
@@ -1026,7 +1026,7 @@ gitterpy==0.1.7
|
|||||||
glances-api==0.8.0
|
glances-api==0.8.0
|
||||||
|
|
||||||
# homeassistant.components.go2rtc
|
# homeassistant.components.go2rtc
|
||||||
go2rtc-client==0.1.3b0
|
go2rtc-client==0.2.1
|
||||||
|
|
||||||
# homeassistant.components.goalzero
|
# homeassistant.components.goalzero
|
||||||
goalzero==0.2.2
|
goalzero==0.2.2
|
||||||
@@ -1572,7 +1572,7 @@ oemthermostat==1.1.1
|
|||||||
ohme==1.5.1
|
ohme==1.5.1
|
||||||
|
|
||||||
# homeassistant.components.ollama
|
# homeassistant.components.ollama
|
||||||
ollama==0.4.7
|
ollama==0.5.1
|
||||||
|
|
||||||
# homeassistant.components.omnilogic
|
# homeassistant.components.omnilogic
|
||||||
omnilogic==0.4.5
|
omnilogic==0.4.5
|
||||||
@@ -1762,7 +1762,7 @@ py-madvr2==1.6.32
|
|||||||
py-melissa-climate==2.1.4
|
py-melissa-climate==2.1.4
|
||||||
|
|
||||||
# homeassistant.components.nextbus
|
# homeassistant.components.nextbus
|
||||||
py-nextbusnext==2.1.2
|
py-nextbusnext==2.2.0
|
||||||
|
|
||||||
# homeassistant.components.nightscout
|
# homeassistant.components.nightscout
|
||||||
py-nightscout==1.2.2
|
py-nightscout==1.2.2
|
||||||
@@ -1841,7 +1841,7 @@ pyasuswrt==0.1.21
|
|||||||
pyatag==0.3.5.3
|
pyatag==0.3.5.3
|
||||||
|
|
||||||
# homeassistant.components.netatmo
|
# homeassistant.components.netatmo
|
||||||
pyatmo==9.2.0
|
pyatmo==9.2.1
|
||||||
|
|
||||||
# homeassistant.components.apple_tv
|
# homeassistant.components.apple_tv
|
||||||
pyatv==0.16.0
|
pyatv==0.16.0
|
||||||
@@ -2221,7 +2221,7 @@ pyotgw==2.2.2
|
|||||||
pyotp==2.8.0
|
pyotp==2.8.0
|
||||||
|
|
||||||
# homeassistant.components.overkiz
|
# homeassistant.components.overkiz
|
||||||
pyoverkiz==1.17.1
|
pyoverkiz==1.17.2
|
||||||
|
|
||||||
# homeassistant.components.onewire
|
# homeassistant.components.onewire
|
||||||
pyownet==0.10.0.post1
|
pyownet==0.10.0.post1
|
||||||
@@ -2353,7 +2353,7 @@ pysmhi==1.0.2
|
|||||||
pysml==0.0.12
|
pysml==0.0.12
|
||||||
|
|
||||||
# homeassistant.components.smlight
|
# homeassistant.components.smlight
|
||||||
pysmlight==0.2.5
|
pysmlight==0.2.6
|
||||||
|
|
||||||
# homeassistant.components.snmp
|
# homeassistant.components.snmp
|
||||||
pysnmp==6.2.6
|
pysnmp==6.2.6
|
||||||
@@ -2434,7 +2434,7 @@ python-google-drive-api==0.1.0
|
|||||||
python-homeassistant-analytics==0.9.0
|
python-homeassistant-analytics==0.9.0
|
||||||
|
|
||||||
# homeassistant.components.homewizard
|
# homeassistant.components.homewizard
|
||||||
python-homewizard-energy==v8.3.2
|
python-homewizard-energy==8.3.3
|
||||||
|
|
||||||
# homeassistant.components.hp_ilo
|
# homeassistant.components.hp_ilo
|
||||||
python-hpilo==4.4.3
|
python-hpilo==4.4.3
|
||||||
@@ -2486,7 +2486,7 @@ python-otbr-api==2.7.0
|
|||||||
python-overseerr==0.7.1
|
python-overseerr==0.7.1
|
||||||
|
|
||||||
# homeassistant.components.picnic
|
# homeassistant.components.picnic
|
||||||
python-picnic-api2==1.2.4
|
python-picnic-api2==1.3.1
|
||||||
|
|
||||||
# homeassistant.components.rabbitair
|
# homeassistant.components.rabbitair
|
||||||
python-rabbitair==0.0.8
|
python-rabbitair==0.0.8
|
||||||
@@ -2652,7 +2652,7 @@ renault-api==0.3.1
|
|||||||
renson-endura-delta==1.7.2
|
renson-endura-delta==1.7.2
|
||||||
|
|
||||||
# homeassistant.components.reolink
|
# homeassistant.components.reolink
|
||||||
reolink-aio==0.13.4
|
reolink-aio==0.13.5
|
||||||
|
|
||||||
# homeassistant.components.idteck_prox
|
# homeassistant.components.idteck_prox
|
||||||
rfk101py==0.0.1
|
rfk101py==0.0.1
|
||||||
|
@@ -10,7 +10,7 @@
|
|||||||
astroid==3.3.10
|
astroid==3.3.10
|
||||||
coverage==7.8.2
|
coverage==7.8.2
|
||||||
freezegun==1.5.2
|
freezegun==1.5.2
|
||||||
go2rtc-client==0.1.3b0
|
go2rtc-client==0.2.1
|
||||||
license-expression==30.4.1
|
license-expression==30.4.1
|
||||||
mock-open==1.4.0
|
mock-open==1.4.0
|
||||||
mypy-dev==1.17.0a2
|
mypy-dev==1.17.0a2
|
||||||
|
26
requirements_test_all.txt
generated
26
requirements_test_all.txt
generated
@@ -170,7 +170,7 @@ aioairzone-cloud==0.6.12
|
|||||||
aioairzone==1.0.0
|
aioairzone==1.0.0
|
||||||
|
|
||||||
# homeassistant.components.amazon_devices
|
# homeassistant.components.amazon_devices
|
||||||
aioamazondevices==3.0.4
|
aioamazondevices==3.0.5
|
||||||
|
|
||||||
# homeassistant.components.ambient_network
|
# homeassistant.components.ambient_network
|
||||||
# homeassistant.components.ambient_station
|
# homeassistant.components.ambient_station
|
||||||
@@ -253,7 +253,7 @@ aiohasupervisor==0.3.1
|
|||||||
aiohomeconnect==0.17.1
|
aiohomeconnect==0.17.1
|
||||||
|
|
||||||
# homeassistant.components.homekit_controller
|
# homeassistant.components.homekit_controller
|
||||||
aiohomekit==3.2.14
|
aiohomekit==3.2.15
|
||||||
|
|
||||||
# homeassistant.components.mcp_server
|
# homeassistant.components.mcp_server
|
||||||
aiohttp_sse==2.2.0
|
aiohttp_sse==2.2.0
|
||||||
@@ -271,7 +271,7 @@ aioimmich==0.8.0
|
|||||||
aiokafka==0.10.0
|
aiokafka==0.10.0
|
||||||
|
|
||||||
# homeassistant.components.rehlko
|
# homeassistant.components.rehlko
|
||||||
aiokem==0.5.12
|
aiokem==1.0.1
|
||||||
|
|
||||||
# homeassistant.components.lifx
|
# homeassistant.components.lifx
|
||||||
aiolifx-effects==0.3.2
|
aiolifx-effects==0.3.2
|
||||||
@@ -541,7 +541,7 @@ bimmer-connected[china]==0.17.2
|
|||||||
|
|
||||||
# homeassistant.components.eq3btsmart
|
# homeassistant.components.eq3btsmart
|
||||||
# homeassistant.components.esphome
|
# homeassistant.components.esphome
|
||||||
bleak-esphome==2.15.1
|
bleak-esphome==2.16.0
|
||||||
|
|
||||||
# homeassistant.components.bluetooth
|
# homeassistant.components.bluetooth
|
||||||
bleak-retry-connector==3.9.0
|
bleak-retry-connector==3.9.0
|
||||||
@@ -887,7 +887,7 @@ gios==6.0.0
|
|||||||
glances-api==0.8.0
|
glances-api==0.8.0
|
||||||
|
|
||||||
# homeassistant.components.go2rtc
|
# homeassistant.components.go2rtc
|
||||||
go2rtc-client==0.1.3b0
|
go2rtc-client==0.2.1
|
||||||
|
|
||||||
# homeassistant.components.goalzero
|
# homeassistant.components.goalzero
|
||||||
goalzero==0.2.2
|
goalzero==0.2.2
|
||||||
@@ -1334,7 +1334,7 @@ odp-amsterdam==6.1.1
|
|||||||
ohme==1.5.1
|
ohme==1.5.1
|
||||||
|
|
||||||
# homeassistant.components.ollama
|
# homeassistant.components.ollama
|
||||||
ollama==0.4.7
|
ollama==0.5.1
|
||||||
|
|
||||||
# homeassistant.components.omnilogic
|
# homeassistant.components.omnilogic
|
||||||
omnilogic==0.4.5
|
omnilogic==0.4.5
|
||||||
@@ -1482,7 +1482,7 @@ py-madvr2==1.6.32
|
|||||||
py-melissa-climate==2.1.4
|
py-melissa-climate==2.1.4
|
||||||
|
|
||||||
# homeassistant.components.nextbus
|
# homeassistant.components.nextbus
|
||||||
py-nextbusnext==2.1.2
|
py-nextbusnext==2.2.0
|
||||||
|
|
||||||
# homeassistant.components.nightscout
|
# homeassistant.components.nightscout
|
||||||
py-nightscout==1.2.2
|
py-nightscout==1.2.2
|
||||||
@@ -1540,7 +1540,7 @@ pyasuswrt==0.1.21
|
|||||||
pyatag==0.3.5.3
|
pyatag==0.3.5.3
|
||||||
|
|
||||||
# homeassistant.components.netatmo
|
# homeassistant.components.netatmo
|
||||||
pyatmo==9.2.0
|
pyatmo==9.2.1
|
||||||
|
|
||||||
# homeassistant.components.apple_tv
|
# homeassistant.components.apple_tv
|
||||||
pyatv==0.16.0
|
pyatv==0.16.0
|
||||||
@@ -1842,7 +1842,7 @@ pyotgw==2.2.2
|
|||||||
pyotp==2.8.0
|
pyotp==2.8.0
|
||||||
|
|
||||||
# homeassistant.components.overkiz
|
# homeassistant.components.overkiz
|
||||||
pyoverkiz==1.17.1
|
pyoverkiz==1.17.2
|
||||||
|
|
||||||
# homeassistant.components.onewire
|
# homeassistant.components.onewire
|
||||||
pyownet==0.10.0.post1
|
pyownet==0.10.0.post1
|
||||||
@@ -1950,7 +1950,7 @@ pysmhi==1.0.2
|
|||||||
pysml==0.0.12
|
pysml==0.0.12
|
||||||
|
|
||||||
# homeassistant.components.smlight
|
# homeassistant.components.smlight
|
||||||
pysmlight==0.2.5
|
pysmlight==0.2.6
|
||||||
|
|
||||||
# homeassistant.components.snmp
|
# homeassistant.components.snmp
|
||||||
pysnmp==6.2.6
|
pysnmp==6.2.6
|
||||||
@@ -2007,7 +2007,7 @@ python-google-drive-api==0.1.0
|
|||||||
python-homeassistant-analytics==0.9.0
|
python-homeassistant-analytics==0.9.0
|
||||||
|
|
||||||
# homeassistant.components.homewizard
|
# homeassistant.components.homewizard
|
||||||
python-homewizard-energy==v8.3.2
|
python-homewizard-energy==8.3.3
|
||||||
|
|
||||||
# homeassistant.components.izone
|
# homeassistant.components.izone
|
||||||
python-izone==1.2.9
|
python-izone==1.2.9
|
||||||
@@ -2053,7 +2053,7 @@ python-otbr-api==2.7.0
|
|||||||
python-overseerr==0.7.1
|
python-overseerr==0.7.1
|
||||||
|
|
||||||
# homeassistant.components.picnic
|
# homeassistant.components.picnic
|
||||||
python-picnic-api2==1.2.4
|
python-picnic-api2==1.3.1
|
||||||
|
|
||||||
# homeassistant.components.rabbitair
|
# homeassistant.components.rabbitair
|
||||||
python-rabbitair==0.0.8
|
python-rabbitair==0.0.8
|
||||||
@@ -2192,7 +2192,7 @@ renault-api==0.3.1
|
|||||||
renson-endura-delta==1.7.2
|
renson-endura-delta==1.7.2
|
||||||
|
|
||||||
# homeassistant.components.reolink
|
# homeassistant.components.reolink
|
||||||
reolink-aio==0.13.4
|
reolink-aio==0.13.5
|
||||||
|
|
||||||
# homeassistant.components.rflink
|
# homeassistant.components.rflink
|
||||||
rflink==0.0.66
|
rflink==0.0.66
|
||||||
|
4
requirements_test_pre_commit.txt
generated
4
requirements_test_pre_commit.txt
generated
@@ -1,5 +1,5 @@
|
|||||||
# Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
|
# Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
|
||||||
|
|
||||||
codespell==2.4.1
|
codespell==2.4.1
|
||||||
ruff==0.11.0
|
ruff==0.11.12
|
||||||
yamllint==1.35.1
|
yamllint==1.37.1
|
||||||
|
@@ -113,9 +113,9 @@ httplib2>=0.19.0
|
|||||||
# gRPC is an implicit dependency that we want to make explicit so we manage
|
# gRPC is an implicit dependency that we want to make explicit so we manage
|
||||||
# upgrades intentionally. It is a large package to build from source and we
|
# upgrades intentionally. It is a large package to build from source and we
|
||||||
# want to ensure we have wheels built.
|
# want to ensure we have wheels built.
|
||||||
grpcio==1.72.0
|
grpcio==1.72.1
|
||||||
grpcio-status==1.72.0
|
grpcio-status==1.72.1
|
||||||
grpcio-reflection==1.72.0
|
grpcio-reflection==1.72.1
|
||||||
|
|
||||||
# This is a old unmaintained library and is replaced with pycryptodome
|
# This is a old unmaintained library and is replaced with pycryptodome
|
||||||
pycrypto==1000000000.0.0
|
pycrypto==1000000000.0.0
|
||||||
@@ -249,6 +249,10 @@ GENERATED_MESSAGE = (
|
|||||||
f"# Automatically generated by {Path(__file__).name}, do not edit\n\n"
|
f"# Automatically generated by {Path(__file__).name}, do not edit\n\n"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
MAP_HOOK_ID_TO_PACKAGE = {
|
||||||
|
"ruff-check": "ruff",
|
||||||
|
}
|
||||||
|
|
||||||
IGNORE_PRE_COMMIT_HOOK_ID = (
|
IGNORE_PRE_COMMIT_HOOK_ID = (
|
||||||
"check-executables-have-shebangs",
|
"check-executables-have-shebangs",
|
||||||
"check-json",
|
"check-json",
|
||||||
@@ -523,7 +527,8 @@ def requirements_pre_commit_output() -> str:
|
|||||||
rev: str = repo["rev"]
|
rev: str = repo["rev"]
|
||||||
for hook in repo["hooks"]:
|
for hook in repo["hooks"]:
|
||||||
if hook["id"] not in IGNORE_PRE_COMMIT_HOOK_ID:
|
if hook["id"] not in IGNORE_PRE_COMMIT_HOOK_ID:
|
||||||
reqs.append(f"{hook['id']}=={rev.lstrip('v')}")
|
pkg = MAP_HOOK_ID_TO_PACKAGE.get(hook["id"]) or hook["id"]
|
||||||
|
reqs.append(f"{pkg}=={rev.lstrip('v')}")
|
||||||
reqs.extend(x for x in hook.get("additional_dependencies", ()))
|
reqs.extend(x for x in hook.get("additional_dependencies", ()))
|
||||||
output = [
|
output = [
|
||||||
f"# Automatically generated "
|
f"# Automatically generated "
|
||||||
|
4
script/hassfest/docker/Dockerfile
generated
4
script/hassfest/docker/Dockerfile
generated
@@ -24,8 +24,8 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.7.1,source=/uv,target=/bin/uv \
|
|||||||
--no-cache \
|
--no-cache \
|
||||||
-c /usr/src/homeassistant/homeassistant/package_constraints.txt \
|
-c /usr/src/homeassistant/homeassistant/package_constraints.txt \
|
||||||
-r /usr/src/homeassistant/requirements.txt \
|
-r /usr/src/homeassistant/requirements.txt \
|
||||||
stdlib-list==0.10.0 pipdeptree==2.26.1 tqdm==4.67.1 ruff==0.11.0 \
|
stdlib-list==0.10.0 pipdeptree==2.26.1 tqdm==4.67.1 ruff==0.11.12 \
|
||||||
PyTurboJPEG==1.8.0 go2rtc-client==0.1.3b0 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.5.28 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
|
PyTurboJPEG==1.8.0 go2rtc-client==0.2.1 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.5.28 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
|
||||||
|
|
||||||
LABEL "name"="hassfest"
|
LABEL "name"="hassfest"
|
||||||
LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"
|
LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"
|
||||||
|
@@ -26,6 +26,7 @@ from .model import Config, Integration
|
|||||||
PACKAGE_CHECK_VERSION_RANGE = {
|
PACKAGE_CHECK_VERSION_RANGE = {
|
||||||
"aiohttp": "SemVer",
|
"aiohttp": "SemVer",
|
||||||
"attrs": "CalVer",
|
"attrs": "CalVer",
|
||||||
|
"awesomeversion": "CalVer",
|
||||||
"grpcio": "SemVer",
|
"grpcio": "SemVer",
|
||||||
"httpx": "SemVer",
|
"httpx": "SemVer",
|
||||||
"mashumaro": "SemVer",
|
"mashumaro": "SemVer",
|
||||||
@@ -40,13 +41,9 @@ PACKAGE_CHECK_VERSION_RANGE_EXCEPTIONS: dict[str, dict[str, set[str]]] = {
|
|||||||
# - domain is the integration domain
|
# - domain is the integration domain
|
||||||
# - package is the package (can be transitive) referencing the dependency
|
# - package is the package (can be transitive) referencing the dependency
|
||||||
# - dependencyX should be the name of the referenced dependency
|
# - dependencyX should be the name of the referenced dependency
|
||||||
"ollama": {
|
"mealie": {
|
||||||
# https://github.com/ollama/ollama-python/pull/445 (not yet released)
|
# https://github.com/joostlek/python-mealie/pull/490
|
||||||
"ollama": {"httpx"}
|
"aiomealie": {"awesomeversion"}
|
||||||
},
|
|
||||||
"overkiz": {
|
|
||||||
# https://github.com/iMicknl/python-overkiz-api/issues/1644 (not yet released)
|
|
||||||
"pyoverkiz": {"attrs"},
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -333,14 +330,6 @@ PYTHON_VERSION_CHECK_EXCEPTIONS: dict[str, dict[str, set[str]]] = {
|
|||||||
# https://github.com/EuleMitKeule/eq3btsmart/releases/tag/2.0.0
|
# https://github.com/EuleMitKeule/eq3btsmart/releases/tag/2.0.0
|
||||||
"homeassistant": {"eq3btsmart"}
|
"homeassistant": {"eq3btsmart"}
|
||||||
},
|
},
|
||||||
"homekit_controller": {
|
|
||||||
# https://github.com/Jc2k/aiohomekit/issues/456
|
|
||||||
"homeassistant": {"aiohomekit"}
|
|
||||||
},
|
|
||||||
"netatmo": {
|
|
||||||
# https://github.com/jabesq-org/pyatmo/pull/533 (not yet released)
|
|
||||||
"homeassistant": {"pyatmo"}
|
|
||||||
},
|
|
||||||
"python_script": {
|
"python_script": {
|
||||||
# Security audits are needed for each Python version
|
# Security audits are needed for each Python version
|
||||||
"homeassistant": {"restrictedpython"}
|
"homeassistant": {"restrictedpython"}
|
||||||
|
@@ -196,6 +196,7 @@ EXCEPTIONS = {
|
|||||||
"maxcube-api", # https://github.com/uebelack/python-maxcube-api/pull/48
|
"maxcube-api", # https://github.com/uebelack/python-maxcube-api/pull/48
|
||||||
"neurio", # https://github.com/jordanh/neurio-python/pull/13
|
"neurio", # https://github.com/jordanh/neurio-python/pull/13
|
||||||
"nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14
|
"nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14
|
||||||
|
"ollama", # https://github.com/ollama/ollama-python/pull/526
|
||||||
"pigpio", # https://github.com/joan2937/pigpio/pull/608
|
"pigpio", # https://github.com/joan2937/pigpio/pull/608
|
||||||
"pymitv", # MIT
|
"pymitv", # MIT
|
||||||
"pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5
|
"pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5
|
||||||
|
@@ -15,7 +15,7 @@ printf "%s\n" $files
|
|||||||
echo "=============="
|
echo "=============="
|
||||||
echo "LINT with ruff"
|
echo "LINT with ruff"
|
||||||
echo "=============="
|
echo "=============="
|
||||||
pre-commit run ruff --files $files
|
pre-commit run ruff-check --files $files
|
||||||
echo "================"
|
echo "================"
|
||||||
echo "LINT with pylint"
|
echo "LINT with pylint"
|
||||||
echo "================"
|
echo "================"
|
||||||
|
@@ -43,6 +43,7 @@ CLOUD_DEVICE_DATA: dict[str, Any] = [
|
|||||||
"temperature": 15,
|
"temperature": 15,
|
||||||
"targetTemperature": 20,
|
"targetTemperature": 20,
|
||||||
"heatingEnabled": True,
|
"heatingEnabled": True,
|
||||||
|
"energyWh": 1500,
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -70,9 +71,17 @@ def mock_adax_cloud():
|
|||||||
with patch("homeassistant.components.adax.coordinator.Adax") as mock_adax:
|
with patch("homeassistant.components.adax.coordinator.Adax") as mock_adax:
|
||||||
mock_adax_class = mock_adax.return_value
|
mock_adax_class = mock_adax.return_value
|
||||||
|
|
||||||
|
mock_adax_class.fetch_rooms_info = AsyncMock()
|
||||||
|
mock_adax_class.fetch_rooms_info.return_value = CLOUD_DEVICE_DATA
|
||||||
|
|
||||||
mock_adax_class.get_rooms = AsyncMock()
|
mock_adax_class.get_rooms = AsyncMock()
|
||||||
mock_adax_class.get_rooms.return_value = CLOUD_DEVICE_DATA
|
mock_adax_class.get_rooms.return_value = CLOUD_DEVICE_DATA
|
||||||
|
|
||||||
|
mock_adax_class.fetch_energy_info = AsyncMock()
|
||||||
|
mock_adax_class.fetch_energy_info.return_value = [
|
||||||
|
{"deviceId": "1", "energyWh": 1500}
|
||||||
|
]
|
||||||
|
|
||||||
mock_adax_class.update = AsyncMock()
|
mock_adax_class.update = AsyncMock()
|
||||||
mock_adax_class.update.return_value = None
|
mock_adax_class.update.return_value = None
|
||||||
yield mock_adax_class
|
yield mock_adax_class
|
||||||
|
237
tests/components/adax/snapshots/test_sensor.ambr
Normal file
237
tests/components/adax/snapshots/test_sensor.ambr
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
# serializer version: 1
|
||||||
|
# name: test_fallback_to_get_rooms[sensor.room_1_energy-entry]
|
||||||
|
EntityRegistryEntrySnapshot({
|
||||||
|
'aliases': set({
|
||||||
|
}),
|
||||||
|
'area_id': None,
|
||||||
|
'capabilities': dict({
|
||||||
|
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||||
|
}),
|
||||||
|
'config_entry_id': <ANY>,
|
||||||
|
'config_subentry_id': <ANY>,
|
||||||
|
'device_class': None,
|
||||||
|
'device_id': <ANY>,
|
||||||
|
'disabled_by': None,
|
||||||
|
'domain': 'sensor',
|
||||||
|
'entity_category': None,
|
||||||
|
'entity_id': 'sensor.room_1_energy',
|
||||||
|
'has_entity_name': True,
|
||||||
|
'hidden_by': None,
|
||||||
|
'icon': None,
|
||||||
|
'id': <ANY>,
|
||||||
|
'labels': set({
|
||||||
|
}),
|
||||||
|
'name': None,
|
||||||
|
'options': dict({
|
||||||
|
'sensor': dict({
|
||||||
|
'suggested_display_precision': 3,
|
||||||
|
}),
|
||||||
|
'sensor.private': dict({
|
||||||
|
'suggested_unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
|
||||||
|
'original_icon': None,
|
||||||
|
'original_name': 'Energy',
|
||||||
|
'platform': 'adax',
|
||||||
|
'previous_unique_id': None,
|
||||||
|
'suggested_object_id': None,
|
||||||
|
'supported_features': 0,
|
||||||
|
'translation_key': 'energy',
|
||||||
|
'unique_id': '1_1_energy',
|
||||||
|
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
})
|
||||||
|
# ---
|
||||||
|
# name: test_fallback_to_get_rooms[sensor.room_1_energy-state]
|
||||||
|
StateSnapshot({
|
||||||
|
'attributes': ReadOnlyDict({
|
||||||
|
'device_class': 'energy',
|
||||||
|
'friendly_name': 'Room 1 Energy',
|
||||||
|
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||||
|
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
}),
|
||||||
|
'context': <ANY>,
|
||||||
|
'entity_id': 'sensor.room_1_energy',
|
||||||
|
'last_changed': <ANY>,
|
||||||
|
'last_reported': <ANY>,
|
||||||
|
'last_updated': <ANY>,
|
||||||
|
'state': '0.0',
|
||||||
|
})
|
||||||
|
# ---
|
||||||
|
# name: test_multiple_devices_create_individual_sensors[sensor.room_1_energy-entry]
|
||||||
|
EntityRegistryEntrySnapshot({
|
||||||
|
'aliases': set({
|
||||||
|
}),
|
||||||
|
'area_id': None,
|
||||||
|
'capabilities': dict({
|
||||||
|
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||||
|
}),
|
||||||
|
'config_entry_id': <ANY>,
|
||||||
|
'config_subentry_id': <ANY>,
|
||||||
|
'device_class': None,
|
||||||
|
'device_id': <ANY>,
|
||||||
|
'disabled_by': None,
|
||||||
|
'domain': 'sensor',
|
||||||
|
'entity_category': None,
|
||||||
|
'entity_id': 'sensor.room_1_energy',
|
||||||
|
'has_entity_name': True,
|
||||||
|
'hidden_by': None,
|
||||||
|
'icon': None,
|
||||||
|
'id': <ANY>,
|
||||||
|
'labels': set({
|
||||||
|
}),
|
||||||
|
'name': None,
|
||||||
|
'options': dict({
|
||||||
|
'sensor': dict({
|
||||||
|
'suggested_display_precision': 3,
|
||||||
|
}),
|
||||||
|
'sensor.private': dict({
|
||||||
|
'suggested_unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
|
||||||
|
'original_icon': None,
|
||||||
|
'original_name': 'Energy',
|
||||||
|
'platform': 'adax',
|
||||||
|
'previous_unique_id': None,
|
||||||
|
'suggested_object_id': None,
|
||||||
|
'supported_features': 0,
|
||||||
|
'translation_key': 'energy',
|
||||||
|
'unique_id': '1_1_energy',
|
||||||
|
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
})
|
||||||
|
# ---
|
||||||
|
# name: test_multiple_devices_create_individual_sensors[sensor.room_1_energy-state]
|
||||||
|
StateSnapshot({
|
||||||
|
'attributes': ReadOnlyDict({
|
||||||
|
'device_class': 'energy',
|
||||||
|
'friendly_name': 'Room 1 Energy',
|
||||||
|
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||||
|
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
}),
|
||||||
|
'context': <ANY>,
|
||||||
|
'entity_id': 'sensor.room_1_energy',
|
||||||
|
'last_changed': <ANY>,
|
||||||
|
'last_reported': <ANY>,
|
||||||
|
'last_updated': <ANY>,
|
||||||
|
'state': '1.5',
|
||||||
|
})
|
||||||
|
# ---
|
||||||
|
# name: test_multiple_devices_create_individual_sensors[sensor.room_2_energy-entry]
|
||||||
|
EntityRegistryEntrySnapshot({
|
||||||
|
'aliases': set({
|
||||||
|
}),
|
||||||
|
'area_id': None,
|
||||||
|
'capabilities': dict({
|
||||||
|
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||||
|
}),
|
||||||
|
'config_entry_id': <ANY>,
|
||||||
|
'config_subentry_id': <ANY>,
|
||||||
|
'device_class': None,
|
||||||
|
'device_id': <ANY>,
|
||||||
|
'disabled_by': None,
|
||||||
|
'domain': 'sensor',
|
||||||
|
'entity_category': None,
|
||||||
|
'entity_id': 'sensor.room_2_energy',
|
||||||
|
'has_entity_name': True,
|
||||||
|
'hidden_by': None,
|
||||||
|
'icon': None,
|
||||||
|
'id': <ANY>,
|
||||||
|
'labels': set({
|
||||||
|
}),
|
||||||
|
'name': None,
|
||||||
|
'options': dict({
|
||||||
|
'sensor': dict({
|
||||||
|
'suggested_display_precision': 3,
|
||||||
|
}),
|
||||||
|
'sensor.private': dict({
|
||||||
|
'suggested_unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
|
||||||
|
'original_icon': None,
|
||||||
|
'original_name': 'Energy',
|
||||||
|
'platform': 'adax',
|
||||||
|
'previous_unique_id': None,
|
||||||
|
'suggested_object_id': None,
|
||||||
|
'supported_features': 0,
|
||||||
|
'translation_key': 'energy',
|
||||||
|
'unique_id': '1_2_energy',
|
||||||
|
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
})
|
||||||
|
# ---
|
||||||
|
# name: test_multiple_devices_create_individual_sensors[sensor.room_2_energy-state]
|
||||||
|
StateSnapshot({
|
||||||
|
'attributes': ReadOnlyDict({
|
||||||
|
'device_class': 'energy',
|
||||||
|
'friendly_name': 'Room 2 Energy',
|
||||||
|
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||||
|
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
}),
|
||||||
|
'context': <ANY>,
|
||||||
|
'entity_id': 'sensor.room_2_energy',
|
||||||
|
'last_changed': <ANY>,
|
||||||
|
'last_reported': <ANY>,
|
||||||
|
'last_updated': <ANY>,
|
||||||
|
'state': '2.5',
|
||||||
|
})
|
||||||
|
# ---
|
||||||
|
# name: test_sensor_cloud[sensor.room_1_energy-entry]
|
||||||
|
EntityRegistryEntrySnapshot({
|
||||||
|
'aliases': set({
|
||||||
|
}),
|
||||||
|
'area_id': None,
|
||||||
|
'capabilities': dict({
|
||||||
|
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||||
|
}),
|
||||||
|
'config_entry_id': <ANY>,
|
||||||
|
'config_subentry_id': <ANY>,
|
||||||
|
'device_class': None,
|
||||||
|
'device_id': <ANY>,
|
||||||
|
'disabled_by': None,
|
||||||
|
'domain': 'sensor',
|
||||||
|
'entity_category': None,
|
||||||
|
'entity_id': 'sensor.room_1_energy',
|
||||||
|
'has_entity_name': True,
|
||||||
|
'hidden_by': None,
|
||||||
|
'icon': None,
|
||||||
|
'id': <ANY>,
|
||||||
|
'labels': set({
|
||||||
|
}),
|
||||||
|
'name': None,
|
||||||
|
'options': dict({
|
||||||
|
'sensor': dict({
|
||||||
|
'suggested_display_precision': 3,
|
||||||
|
}),
|
||||||
|
'sensor.private': dict({
|
||||||
|
'suggested_unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
|
||||||
|
'original_icon': None,
|
||||||
|
'original_name': 'Energy',
|
||||||
|
'platform': 'adax',
|
||||||
|
'previous_unique_id': None,
|
||||||
|
'suggested_object_id': None,
|
||||||
|
'supported_features': 0,
|
||||||
|
'translation_key': 'energy',
|
||||||
|
'unique_id': '1_1_energy',
|
||||||
|
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
})
|
||||||
|
# ---
|
||||||
|
# name: test_sensor_cloud[sensor.room_1_energy-state]
|
||||||
|
StateSnapshot({
|
||||||
|
'attributes': ReadOnlyDict({
|
||||||
|
'device_class': 'energy',
|
||||||
|
'friendly_name': 'Room 1 Energy',
|
||||||
|
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
|
||||||
|
'unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
|
||||||
|
}),
|
||||||
|
'context': <ANY>,
|
||||||
|
'entity_id': 'sensor.room_1_energy',
|
||||||
|
'last_changed': <ANY>,
|
||||||
|
'last_reported': <ANY>,
|
||||||
|
'last_updated': <ANY>,
|
||||||
|
'state': '1.5',
|
||||||
|
})
|
||||||
|
# ---
|
@@ -20,7 +20,7 @@ async def test_climate_cloud(
|
|||||||
) -> None:
|
) -> None:
|
||||||
"""Test states of the (cloud) Climate entity."""
|
"""Test states of the (cloud) Climate entity."""
|
||||||
await setup_integration(hass, mock_cloud_config_entry)
|
await setup_integration(hass, mock_cloud_config_entry)
|
||||||
mock_adax_cloud.get_rooms.assert_called_once()
|
mock_adax_cloud.fetch_rooms_info.assert_called_once()
|
||||||
|
|
||||||
assert len(hass.states.async_entity_ids(Platform.CLIMATE)) == 1
|
assert len(hass.states.async_entity_ids(Platform.CLIMATE)) == 1
|
||||||
entity_id = hass.states.async_entity_ids(Platform.CLIMATE)[0]
|
entity_id = hass.states.async_entity_ids(Platform.CLIMATE)[0]
|
||||||
@@ -37,7 +37,7 @@ async def test_climate_cloud(
|
|||||||
== CLOUD_DEVICE_DATA[0]["temperature"]
|
== CLOUD_DEVICE_DATA[0]["temperature"]
|
||||||
)
|
)
|
||||||
|
|
||||||
mock_adax_cloud.get_rooms.side_effect = Exception()
|
mock_adax_cloud.fetch_rooms_info.side_effect = Exception()
|
||||||
freezer.tick(SCAN_INTERVAL)
|
freezer.tick(SCAN_INTERVAL)
|
||||||
async_fire_time_changed(hass)
|
async_fire_time_changed(hass)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
121
tests/components/adax/test_sensor.py
Normal file
121
tests/components/adax/test_sensor.py
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
"""Test Adax sensor entity."""
|
||||||
|
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
from syrupy.assertion import SnapshotAssertion
|
||||||
|
|
||||||
|
from homeassistant.const import Platform
|
||||||
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.helpers import entity_registry as er
|
||||||
|
|
||||||
|
from . import setup_integration
|
||||||
|
|
||||||
|
from tests.common import MockConfigEntry, snapshot_platform
|
||||||
|
|
||||||
|
|
||||||
|
async def test_sensor_cloud(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
mock_adax_cloud: AsyncMock,
|
||||||
|
mock_cloud_config_entry: MockConfigEntry,
|
||||||
|
snapshot: SnapshotAssertion,
|
||||||
|
entity_registry: er.EntityRegistry,
|
||||||
|
) -> None:
|
||||||
|
"""Test sensor setup for cloud connection."""
|
||||||
|
with patch("homeassistant.components.adax.PLATFORMS", [Platform.SENSOR]):
|
||||||
|
await setup_integration(hass, mock_cloud_config_entry)
|
||||||
|
# Now we use fetch_rooms_info as primary method
|
||||||
|
mock_adax_cloud.fetch_rooms_info.assert_called_once()
|
||||||
|
|
||||||
|
await snapshot_platform(
|
||||||
|
hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def test_sensor_local_not_created(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
mock_adax_local: AsyncMock,
|
||||||
|
mock_local_config_entry: MockConfigEntry,
|
||||||
|
) -> None:
|
||||||
|
"""Test that sensors are not created for local connection."""
|
||||||
|
with patch("homeassistant.components.adax.PLATFORMS", [Platform.SENSOR]):
|
||||||
|
await setup_integration(hass, mock_local_config_entry)
|
||||||
|
|
||||||
|
# No sensor entities should be created for local connection
|
||||||
|
sensor_entities = hass.states.async_entity_ids("sensor")
|
||||||
|
adax_sensors = [e for e in sensor_entities if "adax" in e or "room" in e]
|
||||||
|
assert len(adax_sensors) == 0
|
||||||
|
|
||||||
|
|
||||||
|
async def test_multiple_devices_create_individual_sensors(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
mock_adax_cloud: AsyncMock,
|
||||||
|
mock_cloud_config_entry: MockConfigEntry,
|
||||||
|
snapshot: SnapshotAssertion,
|
||||||
|
entity_registry: er.EntityRegistry,
|
||||||
|
) -> None:
|
||||||
|
"""Test that multiple devices create individual sensors."""
|
||||||
|
# Mock multiple devices for both fetch_rooms_info and get_rooms (fallback)
|
||||||
|
multiple_devices_data = [
|
||||||
|
{
|
||||||
|
"id": "1",
|
||||||
|
"homeId": "1",
|
||||||
|
"name": "Room 1",
|
||||||
|
"temperature": 15,
|
||||||
|
"targetTemperature": 20,
|
||||||
|
"heatingEnabled": True,
|
||||||
|
"energyWh": 1500,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "2",
|
||||||
|
"homeId": "1",
|
||||||
|
"name": "Room 2",
|
||||||
|
"temperature": 18,
|
||||||
|
"targetTemperature": 22,
|
||||||
|
"heatingEnabled": True,
|
||||||
|
"energyWh": 2500,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
mock_adax_cloud.fetch_rooms_info.return_value = multiple_devices_data
|
||||||
|
mock_adax_cloud.get_rooms.return_value = multiple_devices_data
|
||||||
|
|
||||||
|
with patch("homeassistant.components.adax.PLATFORMS", [Platform.SENSOR]):
|
||||||
|
await setup_integration(hass, mock_cloud_config_entry)
|
||||||
|
|
||||||
|
await snapshot_platform(
|
||||||
|
hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def test_fallback_to_get_rooms(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
mock_adax_cloud: AsyncMock,
|
||||||
|
mock_cloud_config_entry: MockConfigEntry,
|
||||||
|
snapshot: SnapshotAssertion,
|
||||||
|
entity_registry: er.EntityRegistry,
|
||||||
|
) -> None:
|
||||||
|
"""Test fallback to get_rooms when fetch_rooms_info returns empty list."""
|
||||||
|
# Mock fetch_rooms_info to return empty list, get_rooms to return data
|
||||||
|
mock_adax_cloud.fetch_rooms_info.return_value = []
|
||||||
|
mock_adax_cloud.get_rooms.return_value = [
|
||||||
|
{
|
||||||
|
"id": "1",
|
||||||
|
"homeId": "1",
|
||||||
|
"name": "Room 1",
|
||||||
|
"temperature": 15,
|
||||||
|
"targetTemperature": 20,
|
||||||
|
"heatingEnabled": True,
|
||||||
|
"energyWh": 0, # No energy data from get_rooms
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with patch("homeassistant.components.adax.PLATFORMS", [Platform.SENSOR]):
|
||||||
|
await setup_integration(hass, mock_cloud_config_entry)
|
||||||
|
|
||||||
|
# Should call both methods
|
||||||
|
mock_adax_cloud.fetch_rooms_info.assert_called_once()
|
||||||
|
mock_adax_cloud.get_rooms.assert_called_once()
|
||||||
|
|
||||||
|
await snapshot_platform(
|
||||||
|
hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id
|
||||||
|
)
|
@@ -53,7 +53,7 @@
'temperature': 23.0,
}),
}),
'supported_features': <WaterHeaterEntityFeature: 6>,
'supported_features': <WaterHeaterEntityFeature: 14>,
'target_temp_high': None,
'target_temp_low': None,
'temperature': None,
@@ -100,7 +100,7 @@
'temperature': 23.0,
}),
}),
'supported_features': <WaterHeaterEntityFeature: 6>,
'supported_features': <WaterHeaterEntityFeature: 14>,
'target_temp_high': None,
'target_temp_low': None,
'temperature': None,
@@ -25,7 +25,6 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

from .conftest import setup_evohome
from .const import TEST_INSTALLS_WITH_DHW
@@ -160,8 +159,8 @@ async def test_set_away_mode(hass: HomeAssistant, evohome: EvohomeClient) -> Non
async def test_turn_off(hass: HomeAssistant, evohome: EvohomeClient) -> None:
"""Test SERVICE_TURN_OFF of an evohome DHW zone."""

# Entity water_heater.xxx does not support this service
# turn_off
with pytest.raises(HomeAssistantError):
with patch("evohomeasync2.hotwater.HotWater.off") as mock_fcn:
await hass.services.async_call(
Platform.WATER_HEATER,
SERVICE_TURN_OFF,
@@ -171,13 +170,15 @@ async def test_turn_off(hass: HomeAssistant, evohome: EvohomeClient) -> None:
blocking=True,
)

mock_fcn.assert_awaited_once_with()


@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW)
async def test_turn_on(hass: HomeAssistant, evohome: EvohomeClient) -> None:
"""Test SERVICE_TURN_ON of an evohome DHW zone."""

# Entity water_heater.xxx does not support this service
# turn_on
with pytest.raises(HomeAssistantError):
with patch("evohomeasync2.hotwater.HotWater.on") as mock_fcn:
await hass.services.async_call(
Platform.WATER_HEATER,
SERVICE_TURN_ON,
@@ -186,3 +187,5 @@ async def test_turn_on(hass: HomeAssistant, evohome: EvohomeClient) -> None:
},
blocking=True,
)

mock_fcn.assert_awaited_once_with()
@@ -54,7 +54,7 @@ from homeassistant.setup import async_setup_component

from .test_init import MOCK_ENVIRON

from tests.common import load_json_object_fixture, mock_platform
from tests.common import async_load_json_object_fixture, mock_platform
from tests.typing import ClientSessionGenerator, WebSocketGenerator

TEST_BACKUP = supervisor_backups.Backup(
@@ -1018,8 +1018,10 @@ async def test_reader_writer_create_addon_folder_error(
supervisor_client.jobs.get_job.side_effect = [
TEST_JOB_NOT_DONE,
supervisor_jobs.Job.from_dict(
load_json_object_fixture(
"backup_done_with_addon_folder_errors.json", DOMAIN
(
await async_load_json_object_fixture(
hass, "backup_done_with_addon_folder_errors.json", DOMAIN
)
)["data"]
),
]
@@ -176,8 +176,8 @@ async def test_hmip_dump_hap_config_services(
assert write_mock.mock_calls


async def test_setup_services_and_unload_services(hass: HomeAssistant) -> None:
async def test_setup_services(hass: HomeAssistant) -> None:
"""Test setup services and unload services."""
"""Test setup services."""
mock_config = {HMIPC_AUTHTOKEN: "123", HMIPC_HAPID: "ABC123", HMIPC_NAME: "name"}
MockConfigEntry(domain=DOMAIN, data=mock_config).add_to_hass(hass)

@@ -201,46 +201,3 @@ async def test_setup_services_and_unload_services(hass: HomeAssistant) -> None:
assert len(config_entries) == 1

await hass.config_entries.async_unload(config_entries[0].entry_id)
# Check services are removed
assert not hass.services.async_services().get(DOMAIN)


async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None:
"""Test setup two access points and unload one by one and check services."""

# Setup AP1
mock_config = {HMIPC_AUTHTOKEN: "123", HMIPC_HAPID: "ABC123", HMIPC_NAME: "name"}
MockConfigEntry(domain=DOMAIN, data=mock_config).add_to_hass(hass)
# Setup AP2
mock_config2 = {HMIPC_AUTHTOKEN: "123", HMIPC_HAPID: "ABC1234", HMIPC_NAME: "name2"}
MockConfigEntry(domain=DOMAIN, data=mock_config2).add_to_hass(hass)

with patch("homeassistant.components.homematicip_cloud.HomematicipHAP") as mock_hap:
instance = mock_hap.return_value
instance.async_setup = AsyncMock(return_value=True)
instance.home.id = "1"
instance.home.modelType = "mock-type"
instance.home.name = "mock-name"
instance.home.label = "mock-label"
instance.home.currentAPVersion = "mock-ap-version"
instance.async_reset = AsyncMock(return_value=True)

assert await async_setup_component(hass, DOMAIN, {})

hmipc_services = hass.services.async_services()[DOMAIN]
assert len(hmipc_services) == 9

config_entries = hass.config_entries.async_entries(DOMAIN)
assert len(config_entries) == 2
# unload the first AP
await hass.config_entries.async_unload(config_entries[0].entry_id)

# services still exists
hmipc_services = hass.services.async_services()[DOMAIN]
assert len(hmipc_services) == 9

# unload the second AP
await hass.config_entries.async_unload(config_entries[1].entry_id)

# Check services are removed
assert not hass.services.async_services().get(DOMAIN)
@@ -15,7 +15,7 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import DHCP_DATA, DISCOVERY_DATA, HOMEKIT_DATA, MOCK_SERIAL

from tests.common import MockConfigEntry, load_json_object_fixture
from tests.common import MockConfigEntry, async_load_json_object_fixture


@pytest.mark.usefixtures("mock_hunterdouglas_hub")
@@ -330,7 +330,9 @@ async def test_form_unsupported_device(
# Simulate a gen 3 secondary hub
with patch(
"homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data",
return_value=load_json_object_fixture("gen3/gateway/secondary.json", DOMAIN),
return_value=await async_load_json_object_fixture(
hass, "gen3/gateway/secondary.json", DOMAIN
),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
@@ -6,6 +6,7 @@ import pytest
from xknx.telegram.apci import GroupValueResponse, GroupValueWrite

from homeassistant.components.knx import async_unload_entry as knx_async_unload_entry
from homeassistant.components.knx.const import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -295,4 +296,5 @@ async def test_service_setup_failed(hass: HomeAssistant, knx: KNXTestKit) -> Non
{"address": "1/2/3", "payload": True, "response": False},
blocking=True,
)
assert str(exc_info.value) == "KNX entry not loaded"
assert exc_info.value.translation_domain == DOMAIN
assert exc_info.value.translation_key == "integration_not_loaded"
@@ -29,7 +29,7 @@ async def test_nexia_sensor_switch(
hass: HomeAssistant, freezer: FrozenDateTimeFactory
) -> None:
"""Test NexiaRoomIQSensorSwitch."""
await async_init_integration(hass, house_fixture="nexia/sensors_xl1050_house.json")
await async_init_integration(hass, house_fixture="sensors_xl1050_house.json")
sw1_id = f"{Platform.SWITCH}.center_nativezone_include_center"
sw1 = {ATTR_ENTITY_ID: sw1_id}
sw2_id = f"{Platform.SWITCH}.center_nativezone_include_upstairs"
@@ -9,7 +9,7 @@ from homeassistant.components.nexia.const import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry, load_fixture
from tests.common import MockConfigEntry, async_load_fixture
from tests.test_util.aiohttp import mock_aiohttp_client


@@ -18,13 +18,13 @@ async def async_init_integration(
skip_setup: bool = False,
exception: Exception | None = None,
*,
house_fixture="nexia/mobile_houses_123456.json",
house_fixture="mobile_houses_123456.json",
) -> MockConfigEntry:
"""Set up the nexia integration in Home Assistant."""

session_fixture = "nexia/session_123456.json"
session_fixture = "session_123456.json"
sign_in_fixture = "nexia/sign_in.json"
sign_in_fixture = "sign_in.json"
set_fan_speed_fixture = "nexia/set_fan_speed_2293892.json"
set_fan_speed_fixture = "set_fan_speed_2293892.json"
with (
mock_aiohttp_client() as mock_session,
patch("nexia.home.load_or_create_uuid", return_value=uuid.uuid4()),
@@ -40,19 +40,20 @@ async def async_init_integration(
)
else:
mock_session.post(
nexia.API_MOBILE_SESSION_URL, text=load_fixture(session_fixture)
nexia.API_MOBILE_SESSION_URL,
text=await async_load_fixture(hass, session_fixture, DOMAIN),
)
mock_session.get(
nexia.API_MOBILE_HOUSES_URL.format(house_id=123456),
text=load_fixture(house_fixture),
text=await async_load_fixture(hass, house_fixture, DOMAIN),
)
mock_session.post(
nexia.API_MOBILE_ACCOUNTS_SIGN_IN_URL,
text=load_fixture(sign_in_fixture),
text=await async_load_fixture(hass, sign_in_fixture, DOMAIN),
)
mock_session.post(
"https://www.mynexia.com/mobile/xxl_thermostats/2293892/fan_speed",
text=load_fixture(set_fan_speed_fixture),
text=await async_load_fixture(hass, set_fan_speed_fixture, DOMAIN),
)
entry = MockConfigEntry(
domain=DOMAIN,
@@ -3,21 +3,16 @@
from typing import Any
from unittest.mock import AsyncMock

import pytest

from homeassistant import config_entries
from homeassistant.components.nmbs.config_flow import CONF_EXCLUDE_VIAS
from homeassistant.components.nmbs.const import (
CONF_STATION_FROM,
CONF_STATION_LIVE,
CONF_STATION_TO,
DOMAIN,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import entity_registry as er

from tests.common import MockConfigEntry

@@ -150,192 +145,3 @@ async def test_unavailable_api(

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "api_unavailable"


async def test_import(
hass: HomeAssistant, mock_nmbs_client: AsyncMock, mock_setup_entry: AsyncMock
) -> None:
"""Test starting a flow by user which filled in data for connection."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_STATION_FROM: DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_LIVE: DUMMY_DATA_IMPORT["STAT_BRUSSELS_CENTRAL"],
CONF_STATION_TO: DUMMY_DATA_IMPORT["STAT_BRUSSELS_SOUTH"],
},
)

assert result["type"] is FlowResultType.CREATE_ENTRY
assert (
result["title"]
== "Train from Brussel-Noord/Bruxelles-Nord to Brussel-Zuid/Bruxelles-Midi"
)
assert result["data"] == {
CONF_STATION_FROM: "BE.NMBS.008812005",
CONF_STATION_LIVE: "BE.NMBS.008813003",
CONF_STATION_TO: "BE.NMBS.008814001",
}
assert (
result["result"].unique_id
== f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
)


async def test_step_import_abort_if_already_setup(
hass: HomeAssistant, mock_nmbs_client: AsyncMock, mock_config_entry: MockConfigEntry
) -> None:
"""Test starting a flow by user which filled in data for connection for already existing connection."""
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_STATION_FROM: DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_TO: DUMMY_DATA_IMPORT["STAT_BRUSSELS_SOUTH"],
},
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"


async def test_unavailable_api_import(
hass: HomeAssistant, mock_nmbs_client: AsyncMock
) -> None:
"""Test starting a flow by import and api is unavailable."""
mock_nmbs_client.get_stations.return_value = None
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_STATION_FROM: DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_LIVE: DUMMY_DATA_IMPORT["STAT_BRUSSELS_CENTRAL"],
CONF_STATION_TO: DUMMY_DATA_IMPORT["STAT_BRUSSELS_SOUTH"],
},
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "api_unavailable"


@pytest.mark.parametrize(
("config", "reason"),
[
(
{
CONF_STATION_FROM: DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_TO: "Utrecht Centraal",
},
"invalid_station",
),
(
{
CONF_STATION_FROM: "Utrecht Centraal",
CONF_STATION_TO: DUMMY_DATA_IMPORT["STAT_BRUSSELS_SOUTH"],
},
"invalid_station",
),
(
{
CONF_STATION_FROM: DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_TO: DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"],
},
"same_station",
),
],
)
async def test_invalid_station_name(
hass: HomeAssistant,
mock_nmbs_client: AsyncMock,
config: dict[str, Any],
reason: str,
) -> None:
"""Test importing invalid YAML."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config,
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == reason


async def test_sensor_id_migration_standardname(
hass: HomeAssistant,
mock_nmbs_client: AsyncMock,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migrating unique id."""
old_unique_id = (
f"live_{DUMMY_DATA_IMPORT['STAT_BRUSSELS_NORTH']}_"
f"{DUMMY_DATA_IMPORT['STAT_BRUSSELS_NORTH']}_"
f"{DUMMY_DATA_IMPORT['STAT_BRUSSELS_SOUTH']}"
)
new_unique_id = (
f"nmbs_live_{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
f"{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
)
old_entry = entity_registry.async_get_or_create(
SENSOR_DOMAIN, DOMAIN, old_unique_id
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_STATION_LIVE: DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_FROM: DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_TO: DUMMY_DATA_IMPORT["STAT_BRUSSELS_SOUTH"],
},
)

assert result["type"] is FlowResultType.CREATE_ENTRY
config_entry_id = result["result"].entry_id
await hass.async_block_till_done()
entities = er.async_entries_for_config_entry(entity_registry, config_entry_id)
assert len(entities) == 3
entities_map = {entity.unique_id: entity for entity in entities}
assert old_unique_id not in entities_map
assert new_unique_id in entities_map
assert entities_map[new_unique_id].id == old_entry.id


async def test_sensor_id_migration_localized_name(
hass: HomeAssistant,
mock_nmbs_client: AsyncMock,
entity_registry: er.EntityRegistry,
) -> None:
"""Test migrating unique id."""
old_unique_id = (
f"live_{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_NORTH']}_"
f"{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_NORTH']}_"
f"{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_SOUTH']}"
)
new_unique_id = (
f"nmbs_live_{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
f"{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
)
old_entry = entity_registry.async_get_or_create(
SENSOR_DOMAIN, DOMAIN, old_unique_id
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_STATION_LIVE: DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_FROM: DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_NORTH"],
CONF_STATION_TO: DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_SOUTH"],
},
)

assert result["type"] is FlowResultType.CREATE_ENTRY
config_entry_id = result["result"].entry_id
await hass.async_block_till_done()
entities = er.async_entries_for_config_entry(entity_registry, config_entry_id)
assert len(entities) == 3
entities_map = {entity.unique_id: entity for entity in entities}
assert old_unique_id not in entities_map
assert new_unique_id in entities_map
assert entities_map[new_unique_id].id == old_entry.id
@@ -9,8 +9,8 @@ from .mock import MOCK_INFO, setup_nuki_integration

from tests.common import (
MockConfigEntry,
load_json_array_fixture,
async_load_json_array_fixture,
load_json_object_fixture,
async_load_json_object_fixture,
)


@@ -21,15 +21,19 @@ async def init_integration(hass: HomeAssistant) -> MockConfigEntry:
mock.get("http://1.1.1.1:8080/info", json=MOCK_INFO)
mock.get(
"http://1.1.1.1:8080/list",
json=load_json_array_fixture("list.json", DOMAIN),
json=await async_load_json_array_fixture(hass, "list.json", DOMAIN),
)
mock.get(
"http://1.1.1.1:8080/callback/list",
json=load_json_object_fixture("callback_list.json", DOMAIN),
json=await async_load_json_object_fixture(
hass, "callback_list.json", DOMAIN
),
)
mock.get(
"http://1.1.1.1:8080/callback/add",
json=load_json_object_fixture("callback_add.json", DOMAIN),
json=await async_load_json_object_fixture(
hass, "callback_add.json", DOMAIN
),
)
entry = await setup_nuki_integration(hass)
await hass.config_entries.async_setup(entry.entry_id)
@@ -8,7 +8,7 @@ from homeassistant.components.overkiz.const import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr

from tests.common import MockConfigEntry, load_json_object_fixture
from tests.common import MockConfigEntry, async_load_json_object_fixture
from tests.components.diagnostics import (
get_diagnostics_for_config_entry,
get_diagnostics_for_device,
@@ -23,7 +23,9 @@ async def test_diagnostics(
snapshot: SnapshotAssertion,
) -> None:
"""Test diagnostics."""
diagnostic_data = load_json_object_fixture("overkiz/setup_tahoma_switch.json")
diagnostic_data = await async_load_json_object_fixture(
hass, "setup_tahoma_switch.json", DOMAIN
)

with patch.multiple(
"pyoverkiz.client.OverkizClient",
@@ -44,7 +46,9 @@ async def test_device_diagnostics(
snapshot: SnapshotAssertion,
) -> None:
"""Test device diagnostics."""
diagnostic_data = load_json_object_fixture("overkiz/setup_tahoma_switch.json")
diagnostic_data = await async_load_json_object_fixture(
hass, "setup_tahoma_switch.json", DOMAIN
)

device = device_registry.async_get_device(
identifiers={(DOMAIN, "rts://****-****-6867/16756006")}
@@ -409,7 +409,7 @@ async def test_update_ws_connection_failure(
patch.object(
remote_websocket,
"start_listening",
side_effect=ConnectionFailure('{"event": "ms.voiceApp.hide"}'),
side_effect=ConnectionFailure({"event": "ms.voiceApp.hide"}),
),
patch.object(remote_websocket, "is_alive", return_value=False),
):
@@ -419,7 +419,7 @@ async def test_update_ws_connection_failure(

assert (
"Unexpected ConnectionFailure trying to get remote for fake_host, please "
'report this issue: ConnectionFailure(\'{"event": "ms.voiceApp.hide"}\')'
"report this issue: ConnectionFailure({'event': 'ms.voiceApp.hide'})"
in caplog.text
)

@@ -427,6 +427,37 @@ async def test_update_ws_connection_failure(
assert state.state == STATE_OFF


@pytest.mark.usefixtures("rest_api")
async def test_update_ws_connection_failure_channel_timeout(
hass: HomeAssistant,
freezer: FrozenDateTimeFactory,
remote_websocket: Mock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Testing update tv connection failure exception."""
await setup_samsungtv_entry(hass, MOCK_CONFIGWS)

with (
patch.object(
remote_websocket,
"start_listening",
side_effect=ConnectionFailure({"event": "ms.channel.timeOut"}),
),
patch.object(remote_websocket, "is_alive", return_value=False),
):
freezer.tick(timedelta(minutes=5))
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)

assert (
"Channel timeout occurred trying to get remote for fake_host: "
"ConnectionFailure({'event': 'ms.channel.timeOut'})" in caplog.text
)

state = hass.states.get(ENTITY_ID)
assert state.state == STATE_OFF


@pytest.mark.usefixtures("rest_api")
async def test_update_ws_connection_closed(
hass: HomeAssistant, freezer: FrozenDateTimeFactory, remote_websocket: Mock
@@ -12,7 +12,7 @@ from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from tests.common import MockConfigEntry, load_fixture
from tests.common import MockConfigEntry, async_load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker

USERNAME = "user"
@@ -53,39 +53,41 @@ def create_entry(hass: HomeAssistant) -> MockConfigEntry:
return entry


async def set_aioclient_responses(aioclient_mock: AiohttpClientMocker) -> None:
async def set_aioclient_responses(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Set AioClient responses."""
aioclient_mock.get(
f"{BASE_URL}devices/{DEVICE_ID}/info/",
text=load_fixture("skybell/device_info.json"),
text=await async_load_fixture(hass, "device_info.json", DOMAIN),
)
aioclient_mock.get(
f"{BASE_URL}devices/{DEVICE_ID}/settings/",
text=load_fixture("skybell/device_settings.json"),
text=await async_load_fixture(hass, "device_settings.json", DOMAIN),
)
aioclient_mock.get(
f"{BASE_URL}devices/{DEVICE_ID}/activities/",
text=load_fixture("skybell/activities.json"),
text=await async_load_fixture(hass, "activities.json", DOMAIN),
)
aioclient_mock.get(
f"{BASE_URL}devices/",
text=load_fixture("skybell/device.json"),
text=await async_load_fixture(hass, "device.json", DOMAIN),
)
aioclient_mock.get(
USERS_ME_URL,
text=load_fixture("skybell/me.json"),
text=await async_load_fixture(hass, "me.json", DOMAIN),
)
aioclient_mock.post(
f"{BASE_URL}login/",
text=load_fixture("skybell/login.json"),
text=await async_load_fixture(hass, "login.json", DOMAIN),
)
aioclient_mock.get(
f"{BASE_URL}devices/{DEVICE_ID}/activities/1234567890ab1234567890ac/video/",
text=load_fixture("skybell/video.json"),
text=await async_load_fixture(hass, "video.json", DOMAIN),
)
aioclient_mock.get(
f"{BASE_URL}devices/{DEVICE_ID}/avatar/",
text=load_fixture("skybell/avatar.json"),
text=await async_load_fixture(hass, "avatar.json", DOMAIN),
)
aioclient_mock.get(
f"https://v3-production-devices-avatar.s3.us-west-2.amazonaws.com/{DEVICE_ID}.jpg",
@@ -96,12 +98,12 @@ async def set_aioclient_responses(aioclient_mock: AiohttpClientMocker) -> None:


@pytest.fixture
async def connection(aioclient_mock: AiohttpClientMocker) -> None:
async def connection(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> None:
"""Fixture for good connection responses."""
await set_aioclient_responses(aioclient_mock)
await set_aioclient_responses(hass, aioclient_mock)


def create_skybell(hass: HomeAssistant) -> Skybell:
async def create_skybell(hass: HomeAssistant) -> Skybell:
"""Create Skybell object."""
skybell = Skybell(
username=USERNAME,
@@ -109,14 +111,15 @@ def create_skybell(hass: HomeAssistant) -> Skybell:
get_devices=True,
session=async_get_clientsession(hass),
)
skybell._cache = orjson.loads(load_fixture("skybell/cache.json"))
skybell._cache = orjson.loads(await async_load_fixture(hass, "cache.json", DOMAIN))
return skybell


def mock_skybell(hass: HomeAssistant):
async def mock_skybell(hass: HomeAssistant):
"""Mock Skybell object."""
return patch(
"homeassistant.components.skybell.Skybell", return_value=create_skybell(hass)
"homeassistant.components.skybell.Skybell",
return_value=await create_skybell(hass),
)


@@ -124,7 +127,7 @@ async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry:
"""Set up the Skybell integration in Home Assistant."""
config_entry = create_entry(hass)

with mock_skybell(hass), patch("aioskybell.utils.async_save_cache"):
with await mock_skybell(hass), patch("aioskybell.utils.async_save_cache"):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()

@@ -6,6 +6,7 @@ from collections.abc import Generator
from unittest.mock import MagicMock, patch

from pysmarlaapi.classes import AuthToken
from pysmarlaapi.federwiege.classes import Property, Service
import pytest

from homeassistant.components.smarla.const import DOMAIN
@@ -60,4 +61,22 @@ def mock_federwiege(mock_connection: MagicMock) -> Generator[MagicMock]:
) as mock_federwiege:
federwiege = mock_federwiege.return_value
federwiege.serial_number = MOCK_SERIAL_NUMBER

mock_babywiege_service = MagicMock(spec=Service)
mock_babywiege_service.props = {
"swing_active": MagicMock(spec=Property),
"smart_mode": MagicMock(spec=Property),
}

mock_babywiege_service.props["swing_active"].get.return_value = False
mock_babywiege_service.props["smart_mode"].get.return_value = False

federwiege.services = {
"babywiege": mock_babywiege_service,
}

federwiege.get_property = MagicMock(
side_effect=lambda service, prop: federwiege.services[service].props[prop]
)

yield federwiege
@@ -2,7 +2,6 @@

from unittest.mock import MagicMock, patch

from pysmarlaapi.federwiege.classes import Property
import pytest
from syrupy.assertion import SnapshotAssertion

@@ -22,26 +21,28 @@ from . import setup_integration, update_property_listeners

from tests.common import MockConfigEntry, snapshot_platform

@pytest.fixture
def mock_switch_property() -> MagicMock:
"""Mock a switch property."""
mock = MagicMock(spec=Property)
mock.get.return_value = False
return mock
SWITCH_ENTITIES = [
{
"entity_id": "switch.smarla",
"service": "babywiege",
"property": "swing_active",
},
{
"entity_id": "switch.smarla_smart_mode",
"service": "babywiege",
"property": "smart_mode",
},
]


async def test_entities(
hass: HomeAssistant,
mock_federwiege: MagicMock,
mock_switch_property: MagicMock,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
snapshot: SnapshotAssertion,
) -> None:
"""Test the Smarla entities."""
mock_federwiege.get_property.return_value = mock_switch_property

with (
patch("homeassistant.components.smarla.PLATFORMS", [Platform.SWITCH]),
):
@@ -59,45 +60,55 @@ async def test_entities(
(SERVICE_TURN_OFF, False),
],
)
@pytest.mark.parametrize("entity_info", SWITCH_ENTITIES)
async def test_switch_action(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_federwiege: MagicMock,
mock_switch_property: MagicMock,
entity_info: dict[str, str],
service: str,
parameter: bool,
) -> None:
"""Test Smarla Switch on/off behavior."""
mock_federwiege.get_property.return_value = mock_switch_property

assert await setup_integration(hass, mock_config_entry)

mock_switch_property = mock_federwiege.get_property(
entity_info["service"], entity_info["property"]
)

entity_id = entity_info["entity_id"]

# Turn on
await hass.services.async_call(
SWITCH_DOMAIN,
service,
{ATTR_ENTITY_ID: "switch.smarla"},
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
mock_switch_property.set.assert_called_once_with(parameter)


@pytest.mark.parametrize("entity_info", SWITCH_ENTITIES)
async def test_switch_state_update(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_federwiege: MagicMock,
mock_switch_property: MagicMock,
entity_info: dict[str, str],
) -> None:
"""Test Smarla Switch callback."""
mock_federwiege.get_property.return_value = mock_switch_property

assert await setup_integration(hass, mock_config_entry)

assert hass.states.get("switch.smarla").state == STATE_OFF
mock_switch_property = mock_federwiege.get_property(
entity_info["service"], entity_info["property"]
)

entity_id = entity_info["entity_id"]

assert hass.states.get(entity_id).state == STATE_OFF

mock_switch_property.get.return_value = True

await update_property_listeners(mock_switch_property)
await hass.async_block_till_done()

assert hass.states.get("switch.smarla").state == STATE_ON
assert hass.states.get(entity_id).state == STATE_ON
@@ -226,14 +226,22 @@ class SoCoMockFactory:
mock_soco.add_uri_to_queue = Mock(return_value=10)

mock_soco.avTransport = SonosMockService("AVTransport", ip_address)
mock_soco.avTransport.GetPositionInfo = Mock(
return_value=self.current_track_info
)
mock_soco.renderingControl = SonosMockService("RenderingControl", ip_address)
mock_soco.zoneGroupTopology = SonosMockService("ZoneGroupTopology", ip_address)
mock_soco.contentDirectory = SonosMockService("ContentDirectory", ip_address)
mock_soco.deviceProperties = SonosMockService("DeviceProperties", ip_address)
mock_soco.zone_group_state = Mock()
mock_soco.zone_group_state.processed_count = 10
mock_soco.zone_group_state.total_requests = 12

mock_soco.alarmClock = self.alarm_clock
mock_soco.get_battery_info.return_value = self.battery_info
mock_soco.all_zones = {mock_soco}
mock_soco.group.coordinator = mock_soco
mock_soco.household_id = "test_household_id"
self.mock_list[ip_address] = mock_soco
return mock_soco
182
tests/components/sonos/snapshots/test_diagnostics.ambr
Normal file
@@ -0,0 +1,182 @@
# serializer version: 1
# name: test_diagnostics_config_entry
dict({
'discovered': dict({
'RINCON_test': dict({
'_group_members_missing': list([
]),
'_last_activity': -1200.0,
'_last_event_cache': dict({
}),
'activity_stats': dict({
}),
'available': True,
'battery_info': dict({
'Health': 'GREEN',
'Level': 100,
'PowerSource': 'SONOS_CHARGING_RING',
'Temperature': 'NORMAL',
}),
'enabled_entities': list([
'binary_sensor.zone_a_charging',
'binary_sensor.zone_a_microphone',
'media_player.zone_a',
'number.zone_a_audio_delay',
'number.zone_a_balance',
'number.zone_a_bass',
'number.zone_a_music_surround_level',
'number.zone_a_sub_gain',
'number.zone_a_surround_level',
'number.zone_a_treble',
'sensor.zone_a_audio_input_format',
'sensor.zone_a_battery',
'switch.sonos_alarm_14',
'switch.zone_a_crossfade',
'switch.zone_a_loudness',
'switch.zone_a_night_sound',
'switch.zone_a_speech_enhancement',
'switch.zone_a_subwoofer_enabled',
'switch.zone_a_surround_enabled',
'switch.zone_a_surround_music_full_volume',
]),
'event_stats': dict({
'soco:parse_event_xml': list([
0,
0,
128,
0,
]),
}),
'hardware_version': '1.20.1.6-1.1',
'household_id': 'test_household_id',
'is_coordinator': True,
'media': dict({
'album_name': None,
'artist': None,
'channel': None,
'current_track_poll': dict({
'album': '',
'album_art': '',
'artist': '',
'duration': 'NOT_IMPLEMENTED',
'duration_in_s': None,
'metadata': 'NOT_IMPLEMENTED',
'playlist_position': '1',
'position': 'NOT_IMPLEMENTED',
'position_in_s': None,
'title': '',
'uri': '',
}),
'duration': None,
'image_url': None,
'playlist_name': None,
'queue_position': None,
'source_name': None,
'title': None,
'uri': None,
}),
'model_name': 'Model Name',
'model_number': 'S12',
'software_version': '49.2-64250',
'subscription_address': '192.168.42.2:8080',
'subscriptions_failed': False,
'version': '13.1',
'zone_group_state_stats': dict({
'processed': 10,
'total_requests': 12,
}),
'zone_name': 'Zone A',
}),
}),
'discovery_known': list([
'RINCON_test',
]),
})
# ---
# name: test_diagnostics_device
dict({
'_group_members_missing': list([
]),
'_last_activity': -1200.0,
'_last_event_cache': dict({
}),
'activity_stats': dict({
}),
'available': True,
'battery_info': dict({
'Health': 'GREEN',
'Level': 100,
'PowerSource': 'SONOS_CHARGING_RING',
'Temperature': 'NORMAL',
}),
'enabled_entities': list([
'binary_sensor.zone_a_charging',
'binary_sensor.zone_a_microphone',
'media_player.zone_a',
'number.zone_a_audio_delay',
'number.zone_a_balance',
'number.zone_a_bass',
'number.zone_a_music_surround_level',
'number.zone_a_sub_gain',
'number.zone_a_surround_level',
'number.zone_a_treble',
'sensor.zone_a_audio_input_format',
'sensor.zone_a_battery',
'switch.sonos_alarm_14',
'switch.zone_a_crossfade',
'switch.zone_a_loudness',
'switch.zone_a_night_sound',
'switch.zone_a_speech_enhancement',
'switch.zone_a_subwoofer_enabled',
'switch.zone_a_surround_enabled',
'switch.zone_a_surround_music_full_volume',
]),
'event_stats': dict({
'soco:parse_event_xml': list([
0,
0,
128,
0,
]),
}),
'hardware_version': '1.20.1.6-1.1',
'household_id': 'test_household_id',
'is_coordinator': True,
'media': dict({
'album_name': None,
'artist': None,
'channel': None,
'current_track_poll': dict({
'album': '',
'album_art': '',
'artist': '',
'duration': 'NOT_IMPLEMENTED',
'duration_in_s': None,
'metadata': 'NOT_IMPLEMENTED',
'playlist_position': '1',
'position': 'NOT_IMPLEMENTED',
'position_in_s': None,
'title': '',
'uri': '',
}),
'duration': None,
'image_url': None,
'playlist_name': None,
'queue_position': None,
'source_name': None,
'title': None,
'uri': None,
}),
'model_name': 'Model Name',
'model_number': 'S12',
'software_version': '49.2-64250',
'subscription_address': '192.168.42.2:8080',
'subscriptions_failed': False,
'version': '13.1',
'zone_group_state_stats': dict({
'processed': 10,
'total_requests': 12,
}),
'zone_name': 'Zone A',
})
# ---
63
tests/components/sonos/test_diagnostics.py
Normal file
@@ -0,0 +1,63 @@
"""Tests for the diagnostics data provided by the Sonos integration."""

from syrupy.assertion import SnapshotAssertion
from syrupy.filters import paths

from homeassistant.components.sonos.const import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceRegistry

from tests.common import MockConfigEntry
from tests.components.diagnostics import (
get_diagnostics_for_config_entry,
get_diagnostics_for_device,
)
from tests.typing import ClientSessionGenerator


async def test_diagnostics_config_entry(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
async_autosetup_sonos,
config_entry: MockConfigEntry,
snapshot: SnapshotAssertion,
) -> None:
"""Test diagnostics for config entry."""

result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry)

# Exclude items that are timing dependent.
assert result == snapshot(
exclude=paths(
"current_timestamp",
"discovered.RINCON_test.event_stats.soco:from_didl_string",
"discovered.RINCON_test.sonos_group_entities",
)
)


async def test_diagnostics_device(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
device_registry: DeviceRegistry,
async_autosetup_sonos,
config_entry: MockConfigEntry,
snapshot: SnapshotAssertion,
) -> None:
"""Test diagnostics for device."""

TEST_DEVICE = "RINCON_test"

device_entry = device_registry.async_get_device(identifiers={(DOMAIN, TEST_DEVICE)})
assert device_entry is not None

result = await get_diagnostics_for_device(
hass, hass_client, config_entry, device_entry
)

assert result == snapshot(
exclude=paths(
"event_stats.soco:from_didl_string",
"sonos_group_entities",
)
)
@@ -12,7 +12,7 @@ from homeassistant.helpers import entity_registry as er

from . import configure_integration

from tests.common import load_json_object_fixture, snapshot_platform
from tests.common import async_load_json_object_fixture, snapshot_platform


async def test_meter(
@@ -33,7 +33,9 @@ async def test_meter(
hubDeviceId="test-hub-id",
),
]
mock_get_status.return_value = load_json_object_fixture("meter_status.json", DOMAIN)
mock_get_status.return_value = await async_load_json_object_fixture(
hass, "meter_status.json", DOMAIN
)

with patch("homeassistant.components.switchbot_cloud.PLATFORMS", [Platform.SENSOR]):
entry = await configure_integration(hass)