mirror of https://github.com/home-assistant/core.git
synced 2026-04-14 21:56:16 +02:00

Compare commits: dev ... add_device (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | a00a34052e |  |

161 .github/renovate.json vendored
@@ -1,161 +0,0 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": ["config:recommended"],

  "enabledManagers": [
    "pep621",
    "pip_requirements",
    "pre-commit",
    "homeassistant-manifest"
  ],

  "pre-commit": {
    "enabled": true
  },

  "pip_requirements": {
    "managerFilePatterns": [
      "/(^|/)requirements[\\w_-]*\\.txt$/",
      "/(^|/)homeassistant/package_constraints\\.txt$/"
    ]
  },

  "homeassistant-manifest": {
    "managerFilePatterns": [
      "/^homeassistant/components/[^/]+/manifest\\.json$/"
    ]
  },

  "minimumReleaseAge": "7 days",
  "prConcurrentLimit": 10,
  "prHourlyLimit": 2,
  "schedule": ["before 6am"],

  "semanticCommits": "disabled",
  "commitMessageAction": "Update",
  "commitMessageTopic": "{{depName}}",
  "commitMessageExtra": "to {{newVersion}}",

  "automerge": false,

  "vulnerabilityAlerts": {
    "enabled": false
  },

  "packageRules": [
    {
      "description": "Deny all by default — allowlist below re-enables specific packages",
      "matchPackageNames": ["*"],
      "enabled": false
    },
    {
      "description": "Core runtime dependencies (allowlisted)",
      "matchPackageNames": [
        "aiohttp",
        "aiohttp-fast-zlib",
        "aiohttp_cors",
        "aiohttp-asyncmdnsresolver",
        "yarl",
        "httpx",
        "requests",
        "urllib3",
        "certifi",
        "orjson",
        "PyYAML",
        "Jinja2",
        "cryptography",
        "pyOpenSSL",
        "PyJWT",
        "SQLAlchemy",
        "Pillow",
        "attrs",
        "uv",
        "voluptuous",
        "voluptuous-serialize",
        "voluptuous-openapi",
        "zeroconf"
      ],
      "enabled": true,
      "labels": ["dependency", "core"]
    },
    {
      "description": "Test dependencies (allowlisted)",
      "matchPackageNames": [
        "pytest",
        "pytest-asyncio",
        "pytest-aiohttp",
        "pytest-cov",
        "pytest-freezer",
        "pytest-github-actions-annotate-failures",
        "pytest-socket",
        "pytest-sugar",
        "pytest-timeout",
        "pytest-unordered",
        "pytest-picked",
        "pytest-xdist",
        "pylint",
        "pylint-per-file-ignores",
        "astroid",
        "coverage",
        "freezegun",
        "syrupy",
        "respx",
        "requests-mock",
        "ruff",
        "codespell",
        "yamllint",
        "zizmor"
      ],
      "enabled": true,
      "labels": ["dependency"]
    },
    {
      "description": "For types-* stubs, only allow patch updates. Major/minor bumps track the upstream runtime package version and must be manually coordinated with the corresponding pin.",
      "matchPackageNames": ["/^types-/"],
      "matchUpdateTypes": ["patch"],
      "enabled": true,
      "labels": ["dependency"]
    },
    {
      "description": "Pre-commit hook repos (allowlisted, matched by owner/repo)",
      "matchPackageNames": [
        "astral-sh/ruff-pre-commit",
        "codespell-project/codespell",
        "adrienverge/yamllint",
        "zizmorcore/zizmor-pre-commit"
      ],
      "enabled": true,
      "labels": ["dependency"]
    },
    {
      "description": "Group ruff pre-commit hook with its PyPI twin into one PR",
      "matchPackageNames": ["astral-sh/ruff-pre-commit", "ruff"],
      "groupName": "ruff",
      "groupSlug": "ruff"
    },
    {
      "description": "Group codespell pre-commit hook with its PyPI twin into one PR",
      "matchPackageNames": ["codespell-project/codespell", "codespell"],
      "groupName": "codespell",
      "groupSlug": "codespell"
    },
    {
      "description": "Group yamllint pre-commit hook with its PyPI twin into one PR",
      "matchPackageNames": ["adrienverge/yamllint", "yamllint"],
      "groupName": "yamllint",
      "groupSlug": "yamllint"
    },
    {
      "description": "Group zizmor pre-commit hook with its PyPI twin into one PR",
      "matchPackageNames": ["zizmorcore/zizmor-pre-commit", "zizmor"],
      "groupName": "zizmor",
      "groupSlug": "zizmor"
    },
    {
      "description": "Group pylint with astroid (their versions are linked and must move together)",
      "matchPackageNames": ["pylint", "astroid"],
      "groupName": "pylint",
      "groupSlug": "pylint"
    }
  ]
}

8 .github/workflows/ci.yaml vendored
@@ -50,11 +50,9 @@ env:
  # - 10.10.3 is the latest (as of 6 Feb 2023)
  # 10.11 is the latest long-term-support
  # - 10.11.2 is the version currently shipped with Synology (as of 11 Oct 2023)
  # 11.4 is an LTS with support until May 2029
  # - 11.4.9 is used in Alpine 3.23 (used in latest HA base images as of 11 Apr 2026)
  # mysql 8.0.32 does not always behave the same as MariaDB
  # and some queries that work on MariaDB do not work on MySQL
  MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mariadb:10.11.2','mariadb:11.4.9','mysql:8.0.32']"
  MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mariadb:10.11.2','mysql:8.0.32']"
  # 12 is the oldest supported version
  # - 12.14 is the latest (as of 9 Feb 2023)
  # 15 is the latest version
@@ -1064,9 +1062,7 @@ jobs:
          - 3306:3306
        env:
          MYSQL_ROOT_PASSWORD: password
        options: >-
          --health-cmd="if command -v mariadb-admin >/dev/null; then mariadb-admin ping -uroot -ppassword; else mysqladmin ping -uroot -ppassword; fi"
          --health-interval=5s --health-timeout=2s --health-retries=3
        options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
    needs:
      - info
      - base

@@ -36,7 +36,7 @@ repos:
        - --branch=master
        - --branch=rc
  - repo: https://github.com/adrienverge/yamllint.git
    rev: v1.38.0
    rev: v1.37.1
    hooks:
      - id: yamllint
  - repo: https://github.com/rbubley/mirrors-prettier

4 CODEOWNERS generated
@@ -1877,8 +1877,8 @@ CLAUDE.md @home-assistant/core
/tests/components/version/ @ludeeus
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vicare/ @CFenner @lackas
/tests/components/vicare/ @CFenner @lackas
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/victron_ble/ @rajlaud
/tests/components/victron_ble/ @rajlaud
/homeassistant/components/victron_gx/ @tomer-w

@@ -57,9 +57,9 @@ rules:
  entity-category: done
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: done
  entity-translations: todo
  exception-translations: done
  icon-translations: done
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues:
    status: exempt

@@ -11,12 +11,12 @@
      "user": {
        "data": {
          "tracked_apps": "Apps",
          "tracked_custom_integrations": "Custom integrations",
          "tracked_custom_integrations": "Community integrations",
          "tracked_integrations": "Integrations"
        },
        "data_description": {
          "tracked_apps": "Select the apps you want to track",
          "tracked_custom_integrations": "Select the custom integrations you want to track",
          "tracked_custom_integrations": "Select the community integrations you want to track",
          "tracked_integrations": "Select the integrations you want to track"
        }
      }
@@ -31,7 +31,7 @@
        "unit_of_measurement": "[%key:component::analytics_insights::entity::sensor::apps::unit_of_measurement%]"
      },
      "custom_integrations": {
        "name": "{custom_integration_domain} (custom)",
        "name": "{custom_integration_domain} (community)",
        "unit_of_measurement": "[%key:component::analytics_insights::entity::sensor::apps::unit_of_measurement%]"
      },
      "total_active_installations": {

@@ -143,7 +143,6 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
    "occupancy",
    "person",
    "power",
    "remote",
    "schedule",
    "select",
    "siren",
@@ -151,7 +150,6 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
    "temperature",
    "text",
    "timer",
    "todo",
    "vacuum",
    "valve",
    "water_heater",

@@ -1,7 +1,7 @@
{
  "issues": {
    "integration_removed": {
      "description": "The BMW Connected Drive integration has been removed from Home Assistant.\n\nIn September 2025, BMW blocked third-party access to their servers by adding additional security measures. For EU-registered cars, a community-developed [custom component]({custom_component_url}) using BMW's CarData API is available as an alternative.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing BMW Connected Drive integration entries]({entries}).",
      "description": "The BMW Connected Drive integration has been removed from Home Assistant.\n\nIn September 2025, BMW blocked third-party access to their servers by adding additional security measures. For EU-registered cars, a [community integration]({custom_component_url}) using BMW's CarData API is available as an alternative.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing BMW Connected Drive integration entries]({entries}).",
      "title": "The BMW Connected Drive integration has been removed"
    }
  }

@@ -1,14 +1,88 @@
"""Provides conditions for device trackers."""

from homeassistant.const import STATE_HOME, STATE_NOT_HOME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition
from typing import TYPE_CHECKING

import voluptuous as vol

from homeassistant.components.zone import ENTITY_ID_HOME as ENTITY_ID_HOME_ZONE
from homeassistant.const import CONF_OPTIONS, CONF_ZONE, STATE_HOME, STATE_NOT_HOME
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import DomainSpec
from homeassistant.helpers.condition import (
    ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL,
    Condition,
    ConditionConfig,
    EntityConditionBase,
    make_entity_state_condition,
)

from .const import ATTR_IN_ZONES, DOMAIN

ZONE_CONDITION_SCHEMA = ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL.extend(
    {
        vol.Required(CONF_OPTIONS): {
            vol.Required(CONF_ZONE): vol.All(
                cv.ensure_list,
                vol.Length(min=1),
                [cv.entity_domain("zone")],
            ),
        },
    }
)

_IN_ZONES_SPEC = {DOMAIN: DomainSpec(value_source=ATTR_IN_ZONES)}


class ZoneConditionBase(EntityConditionBase):
    """Base for zone-based device tracker conditions."""

    _domain_specs = _IN_ZONES_SPEC
    _schema = ZONE_CONDITION_SCHEMA

    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
        """Initialize the condition."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.options is not None
        self._zones: set[str] = set(config.options[CONF_ZONE])

    def _in_target_zones(self, state: State) -> bool:
        """Check if the device is in any of the selected zones.

        For GPS-based trackers, uses the in_zones attribute.
        For scanner-based trackers (no in_zones attribute), infers from
        state: 'home' means the device is in zone.home.
        """
        if (in_zones := self._get_tracked_value(state)) is not None:
            return bool(set(in_zones).intersection(self._zones))
        # Scanner tracker: state 'home' means in zone.home
        if state.state == STATE_HOME:
            return ENTITY_ID_HOME_ZONE in self._zones
        return False


class InZoneCondition(ZoneConditionBase):
    """Condition that tests if a device tracker is in one of the selected zones."""

    def is_valid_state(self, entity_state: State) -> bool:
        """Check that the device is in at least one of the selected zones."""
        return self._in_target_zones(entity_state)


class NotInZoneCondition(ZoneConditionBase):
    """Condition that tests if a device tracker is not in any of the selected zones."""

    def is_valid_state(self, entity_state: State) -> bool:
        """Check that the device is not in any of the selected zones."""
        return not self._in_target_zones(entity_state)

from .const import DOMAIN

CONDITIONS: dict[str, type[Condition]] = {
    "in_zone": InZoneCondition,
    "is_home": make_entity_state_condition(DOMAIN, STATE_HOME),
    "is_not_home": make_entity_state_condition(DOMAIN, STATE_NOT_HOME),
    "not_in_zone": NotInZoneCondition,
}

@@ -1,9 +1,9 @@
.condition_common: &condition_common
  target:
  target: &condition_target
    entity:
      domain: device_tracker
  fields:
    behavior:
    behavior: &condition_behavior
      required: true
      default: any
      selector:
@@ -13,5 +13,18 @@
          - all
          - any

.condition_zone: &condition_zone
  <<: *condition_common
  fields:
    behavior: *condition_behavior
    zone:
      required: true
      selector:
        entity:
          domain: zone
          multiple: true

in_zone: *condition_zone
is_home: *condition_common
is_not_home: *condition_common
not_in_zone: *condition_zone

@@ -1,10 +1,16 @@
{
  "conditions": {
    "in_zone": {
      "condition": "mdi:map-marker-check"
    },
    "is_home": {
      "condition": "mdi:account"
    },
    "is_not_home": {
      "condition": "mdi:account-arrow-right"
    },
    "not_in_zone": {
      "condition": "mdi:map-marker-remove"
    }
  },
  "entity_component": {

@@ -1,9 +1,24 @@
{
  "common": {
    "condition_behavior_name": "Condition passes if",
    "condition_zone_description": "The zones to check for.",
    "condition_zone_name": "Zone",
    "trigger_behavior_name": "Trigger when"
  },
  "conditions": {
    "in_zone": {
      "description": "Tests if one or more device trackers are in a zone.",
      "fields": {
        "behavior": {
          "name": "[%key:component::device_tracker::common::condition_behavior_name%]"
        },
        "zone": {
          "description": "[%key:component::device_tracker::common::condition_zone_description%]",
          "name": "[%key:component::device_tracker::common::condition_zone_name%]"
        }
      },
      "name": "In zone"
    },
    "is_home": {
      "description": "Tests if one or more device trackers are home.",
      "fields": {
@@ -21,6 +36,19 @@
        }
      },
      "name": "Device tracker is not home"
    },
    "not_in_zone": {
      "description": "Tests if one or more device trackers are not in a zone.",
      "fields": {
        "behavior": {
          "name": "[%key:component::device_tracker::common::condition_behavior_name%]"
        },
        "zone": {
          "description": "[%key:component::device_tracker::common::condition_zone_description%]",
          "name": "[%key:component::device_tracker::common::condition_zone_name%]"
        }
      },
      "name": "Not in zone"
    }
  },
  "device_automation": {

@@ -6,5 +6,5 @@
  "iot_class": "local_polling",
  "loggers": ["pydoods"],
  "quality_scale": "legacy",
  "requirements": ["pydoods==1.0.2", "Pillow==12.2.0"]
  "requirements": ["pydoods==1.0.2", "Pillow==12.1.1"]
}

@@ -5,5 +5,5 @@ from datetime import timedelta
from homeassistant.const import Platform

DOMAIN = "duco"
PLATFORMS = [Platform.FAN, Platform.SENSOR]
PLATFORMS = [Platform.FAN]
SCAN_INTERVAL = timedelta(seconds=30)

@@ -1,15 +0,0 @@
{
  "entity": {
    "sensor": {
      "iaq_co2": {
        "default": "mdi:molecule-co2"
      },
      "iaq_rh": {
        "default": "mdi:water-percent"
      },
      "ventilation_state": {
        "default": "mdi:tune-variant"
      }
    }
  }
}
@@ -71,11 +71,11 @@ rules:
      Users can pair new modules (CO2 sensors, humidity sensors, zone valves)
      to their Duco box. Dynamic device support to be added in a follow-up PR.
  entity-category: todo
  entity-device-class: done
  entity-disabled-by-default: done
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: done
  exception-translations: todo
  icon-translations: done
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices:

@@ -1,119 +0,0 @@
"""Sensor platform for the Duco integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from duco.models import Node, NodeType, VentilationState

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import DucoConfigEntry, DucoCoordinator
from .entity import DucoEntity

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class DucoSensorEntityDescription(SensorEntityDescription):
    """Duco sensor entity description."""

    value_fn: Callable[[Node], int | float | str | None]
    node_types: tuple[NodeType, ...]


SENSOR_DESCRIPTIONS: tuple[DucoSensorEntityDescription, ...] = (
    DucoSensorEntityDescription(
        key="ventilation_state",
        translation_key="ventilation_state",
        device_class=SensorDeviceClass.ENUM,
        options=[s.lower() for s in VentilationState],
        value_fn=lambda node: (
            node.ventilation.state.lower() if node.ventilation else None
        ),
        node_types=(NodeType.BOX,),
    ),
    DucoSensorEntityDescription(
        key="co2",
        device_class=SensorDeviceClass.CO2,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
        value_fn=lambda node: node.sensor.co2 if node.sensor else None,
        node_types=(NodeType.UCCO2,),
    ),
    DucoSensorEntityDescription(
        key="iaq_co2",
        translation_key="iaq_co2",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
        value_fn=lambda node: node.sensor.iaq_co2 if node.sensor else None,
        node_types=(NodeType.UCCO2,),
    ),
    DucoSensorEntityDescription(
        key="humidity",
        device_class=SensorDeviceClass.HUMIDITY,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        value_fn=lambda node: node.sensor.rh if node.sensor else None,
        node_types=(NodeType.BSRH,),
    ),
    DucoSensorEntityDescription(
        key="iaq_rh",
        translation_key="iaq_rh",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        entity_registry_enabled_default=False,
        value_fn=lambda node: node.sensor.iaq_rh if node.sensor else None,
        node_types=(NodeType.BSRH,),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: DucoConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Duco sensor entities."""
    coordinator = entry.runtime_data

    async_add_entities(
        DucoSensorEntity(coordinator, node, description)
        for node in coordinator.data.values()
        for description in SENSOR_DESCRIPTIONS
        if node.general.node_type in description.node_types
    )


class DucoSensorEntity(DucoEntity, SensorEntity):
    """Sensor entity for a Duco node."""

    entity_description: DucoSensorEntityDescription

    def __init__(
        self,
        coordinator: DucoCoordinator,
        node: Node,
        description: DucoSensorEntityDescription,
    ) -> None:
        """Initialize the sensor entity."""
        super().__init__(coordinator, node)
        self.entity_description = description
        self._attr_unique_id = (
            f"{coordinator.config_entry.unique_id}_{node.node_id}_{description.key}"
        )

    @property
    def native_value(self) -> int | float | str | None:
        """Return the sensor value."""
        return self.entity_description.value_fn(self._node)
@@ -29,36 +29,6 @@
          }
        }
      }
    },
    "sensor": {
      "iaq_co2": {
        "name": "CO2 air quality index"
      },
      "iaq_rh": {
        "name": "Humidity air quality index"
      },
      "ventilation_state": {
        "name": "Ventilation state",
        "state": {
          "aut1": "Automatic boost (15 min)",
          "aut2": "Automatic boost (30 min)",
          "aut3": "Automatic boost (45 min)",
          "auto": "Automatic",
          "cnt1": "Continuous low speed",
          "cnt2": "Continuous medium speed",
          "cnt3": "Continuous high speed",
          "empt": "Empty house",
          "man1": "Manual low speed (15 min)",
          "man1x2": "Manual low speed (30 min)",
          "man1x3": "Manual low speed (45 min)",
          "man2": "Manual medium speed (15 min)",
          "man2x2": "Manual medium speed (30 min)",
          "man2x3": "Manual medium speed (45 min)",
          "man3": "Manual high speed (15 min)",
          "man3x2": "Manual high speed (30 min)",
          "man3x3": "Manual high speed (45 min)"
        }
      }
    }
  },
  "exceptions": {

@@ -8,7 +8,7 @@
  "iot_class": "local_polling",
  "loggers": ["pyenphase"],
  "quality_scale": "platinum",
  "requirements": ["pyenphase==2.4.8"],
  "requirements": ["pyenphase==2.4.6"],
  "zeroconf": [
    {
      "type": "_enphase-envoy._tcp.local."

@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/generic",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["av==16.0.1", "Pillow==12.2.0"]
  "requirements": ["av==16.0.1", "Pillow==12.1.1"]
}

@@ -175,7 +175,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        await server.start()
    except Exception:  # noqa: BLE001
        _LOGGER.warning("Could not start go2rtc server", exc_info=True)
        await session.close()
        return False

    async def on_stop(event: Event) -> None:

@@ -91,14 +91,10 @@ from .const import (
    DATA_STORE,
    DATA_SUPERVISOR_INFO,
    DOMAIN,
    HASSIO_MAIN_UPDATE_INTERVAL,
    MAIN_COORDINATOR,
    STATS_COORDINATOR,
    HASSIO_UPDATE_INTERVAL,
)
from .coordinator import (
    HassioAddOnDataUpdateCoordinator,
    HassioMainDataUpdateCoordinator,
    HassioStatsDataUpdateCoordinator,
    HassioDataUpdateCoordinator,
    get_addons_info,
    get_addons_list,
    get_addons_stats,
@@ -388,6 +384,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
        ]
        hass.data[DATA_SUPERVISOR_INFO]["addons"] = hass.data[DATA_ADDONS_LIST]

        async_call_later(
            hass,
            HASSIO_UPDATE_INTERVAL,
            HassJob(update_info_data, cancel_on_shutdown=True),
        )

    # Fetch data
    update_info_task = hass.async_create_task(update_info_data(), eager_start=True)

@@ -434,7 +436,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
            # os info not yet fetched from supervisor, retry later
            async_call_later(
                hass,
                HASSIO_MAIN_UPDATE_INTERVAL,
                HASSIO_UPDATE_INTERVAL,
                async_setup_hardware_integration_job,
            )
            return
@@ -460,20 +462,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry."""
    dev_reg = dr.async_get(hass)

    coordinator = HassioMainDataUpdateCoordinator(hass, entry, dev_reg)
    coordinator = HassioDataUpdateCoordinator(hass, entry, dev_reg)
    await coordinator.async_config_entry_first_refresh()
    hass.data[MAIN_COORDINATOR] = coordinator

    addon_coordinator = HassioAddOnDataUpdateCoordinator(
        hass, entry, dev_reg, coordinator.jobs
    )
    await addon_coordinator.async_config_entry_first_refresh()
    hass.data[ADDONS_COORDINATOR] = addon_coordinator

    stats_coordinator = HassioStatsDataUpdateCoordinator(hass, entry)
    await stats_coordinator.async_config_entry_first_refresh()
    hass.data[STATS_COORDINATOR] = stats_coordinator
    hass.data[ADDONS_COORDINATOR] = coordinator

    def deprecated_setup_issue() -> None:
        os_info = get_os_info(hass)
@@ -540,12 +531,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

    # Unload coordinator
    coordinator: HassioMainDataUpdateCoordinator = hass.data[MAIN_COORDINATOR]
    coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
    coordinator.unload()

    # Pop coordinators
    hass.data.pop(MAIN_COORDINATOR, None)
    # Pop coordinator
    hass.data.pop(ADDONS_COORDINATOR, None)
    hass.data.pop(STATS_COORDINATOR, None)

    return unload_ok

@@ -22,7 +22,6 @@ from .const import (
    ATTR_STATE,
    DATA_KEY_ADDONS,
    DATA_KEY_MOUNTS,
    MAIN_COORDINATOR,
)
from .entity import HassioAddonEntity, HassioMountEntity

@@ -61,18 +60,17 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Binary sensor set up for Hass.io config entry."""
    addons_coordinator = hass.data[ADDONS_COORDINATOR]
    coordinator = hass.data[MAIN_COORDINATOR]
    coordinator = hass.data[ADDONS_COORDINATOR]

    async_add_entities(
        itertools.chain(
            [
                HassioAddonBinarySensor(
                    addon=addon,
                    coordinator=addons_coordinator,
                    coordinator=coordinator,
                    entity_description=entity_description,
                )
                for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
                for addon in coordinator.data[DATA_KEY_ADDONS].values()
                for entity_description in ADDON_ENTITY_DESCRIPTIONS
            ],
            [

@@ -77,9 +77,7 @@ EVENT_JOB = "job"
UPDATE_KEY_SUPERVISOR = "supervisor"
STARTUP_COMPLETE = "complete"

MAIN_COORDINATOR = "hassio_main_coordinator"
ADDONS_COORDINATOR = "hassio_addons_coordinator"
STATS_COORDINATOR = "hassio_stats_coordinator"


DATA_COMPONENT: HassKey[HassIO] = HassKey(DOMAIN)
@@ -96,9 +94,7 @@ DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
DATA_ADDONS_INFO = "hassio_addons_info"
DATA_ADDONS_STATS = "hassio_addons_stats"
DATA_ADDONS_LIST = "hassio_addons_list"
HASSIO_MAIN_UPDATE_INTERVAL = timedelta(minutes=5)
HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=15)
HASSIO_STATS_UPDATE_INTERVAL = timedelta(seconds=60)
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)

ATTR_AUTO_UPDATE = "auto_update"
ATTR_VERSION = "version"

@@ -7,7 +7,7 @@ from collections import defaultdict
from collections.abc import Awaitable
from copy import deepcopy
import logging
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any, cast

from aiohasupervisor import SupervisorError, SupervisorNotFoundError
from aiohasupervisor.models import (
@@ -15,9 +15,9 @@ from aiohasupervisor.models import (
    CIFSMountResponse,
    InstalledAddon,
    NFSMountResponse,
    ResponseData,
    StoreInfo,
)
from aiohasupervisor.models.base import ResponseData

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
@@ -35,11 +35,13 @@ from .const import (
    ATTR_SLUG,
    ATTR_URL,
    ATTR_VERSION,
    CONTAINER_INFO,
    CONTAINER_STATS,
    CORE_CONTAINER,
    DATA_ADDONS_INFO,
    DATA_ADDONS_LIST,
    DATA_ADDONS_STATS,
    DATA_COMPONENT,
    DATA_CORE_INFO,
    DATA_CORE_STATS,
    DATA_HOST_INFO,
@@ -57,9 +59,7 @@ from .const import (
    DATA_SUPERVISOR_INFO,
    DATA_SUPERVISOR_STATS,
    DOMAIN,
    HASSIO_ADDON_UPDATE_INTERVAL,
    HASSIO_MAIN_UPDATE_INTERVAL,
    HASSIO_STATS_UPDATE_INTERVAL,
    HASSIO_UPDATE_INTERVAL,
    REQUEST_REFRESH_DELAY,
    SUPERVISOR_CONTAINER,
    SupervisorEntityModel,
@@ -318,314 +318,7 @@ def async_remove_devices_from_dev_reg(
        dev_reg.async_remove_device(dev.id)


class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Class to retrieve Hass.io container stats."""

    config_entry: ConfigEntry

    def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
        """Initialize coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=HASSIO_STATS_UPDATE_INTERVAL,
            request_refresh_debouncer=Debouncer(
                hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
            ),
        )
        self.supervisor_client = get_supervisor_client(hass)
        self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
            lambda: defaultdict(set)
        )

    async def _async_update_data(self) -> dict[str, Any]:
        """Update stats data via library."""
        try:
            await self._fetch_stats()
        except SupervisorError as err:
            raise UpdateFailed(f"Error on Supervisor API: {err}") from err

        new_data: dict[str, Any] = {}
        new_data[DATA_KEY_CORE] = get_core_stats(self.hass)
        new_data[DATA_KEY_SUPERVISOR] = get_supervisor_stats(self.hass)
        new_data[DATA_KEY_ADDONS] = get_addons_stats(self.hass)
        return new_data

    async def _fetch_stats(self) -> None:
        """Fetch container stats for subscribed entities."""
        container_updates = self._container_updates
        data = self.hass.data
        client = self.supervisor_client

        # Fetch core and supervisor stats
        updates: dict[str, Awaitable] = {}
        if container_updates.get(CORE_CONTAINER, {}).get(CONTAINER_STATS):
            updates[DATA_CORE_STATS] = client.homeassistant.stats()
        if container_updates.get(SUPERVISOR_CONTAINER, {}).get(CONTAINER_STATS):
            updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats()

        if updates:
            api_results: list[ResponseData] = await asyncio.gather(*updates.values())
            for key, result in zip(updates, api_results, strict=True):
                data[key] = result.to_dict()

        # Fetch addon stats
        addons_list = get_addons_list(self.hass) or []
        started_addons = {
            addon[ATTR_SLUG]
            for addon in addons_list
            if addon.get("state") in {AddonState.STARTED, AddonState.STARTUP}
        }

        addons_stats: dict[str, Any] = data.setdefault(DATA_ADDONS_STATS, {})

        # Clean up cache for stopped/removed addons
        for slug in addons_stats.keys() - started_addons:
            del addons_stats[slug]

        # Fetch stats for addons with subscribed entities
        addon_stats_results = dict(
            await asyncio.gather(
                *[
                    self._update_addon_stats(slug)
                    for slug in started_addons
                    if container_updates.get(slug, {}).get(CONTAINER_STATS)
                ]
            )
        )
        addons_stats.update(addon_stats_results)

    async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
        """Update single addon stats."""
        try:
            stats = await self.supervisor_client.addons.addon_stats(slug)
        except SupervisorError as err:
            _LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
            return (slug, None)
        return (slug, stats.to_dict())

    @callback
    def async_enable_container_updates(
        self, slug: str, entity_id: str, types: set[str]
    ) -> CALLBACK_TYPE:
        """Enable stats updates for a container."""
        enabled_updates = self._container_updates[slug]
        for key in types:
            enabled_updates[key].add(entity_id)

        @callback
        def _remove() -> None:
            for key in types:
                enabled_updates[key].discard(entity_id)
                if not enabled_updates[key]:
                    del enabled_updates[key]
            if not enabled_updates:
                self._container_updates.pop(slug, None)

        return _remove


class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Class to retrieve Hass.io Add-on status."""

    config_entry: ConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        dev_reg: dr.DeviceRegistry,
        jobs: SupervisorJobs,
    ) -> None:
        """Initialize coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=HASSIO_ADDON_UPDATE_INTERVAL,
            # We don't want an immediate refresh since we want to avoid
            # hammering the Supervisor API on startup
            request_refresh_debouncer=Debouncer(
                hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
            ),
        )
        self.entry_id = config_entry.entry_id
        self.dev_reg = dev_reg
        self._addon_info_subscriptions: defaultdict[str, set[str]] = defaultdict(set)
        self.supervisor_client = get_supervisor_client(hass)
        self.jobs = jobs

    async def _async_update_data(self) -> dict[str, Any]:
        """Update data via library."""
        is_first_update = not self.data
        client = self.supervisor_client

        try:
            installed_addons: list[InstalledAddon] = await client.addons.list()
            all_addons = {addon.slug for addon in installed_addons}

            # Fetch addon info for all addons on first update, or only
            # for addons with subscribed entities on subsequent updates.
            addon_info_results = dict(
                await asyncio.gather(
                    *[
                        self._update_addon_info(slug)
                        for slug in all_addons
                        if is_first_update or self._addon_info_subscriptions.get(slug)
                    ]
                )
            )
        except SupervisorError as err:
            raise UpdateFailed(f"Error on Supervisor API: {err}") from err

        # Update hass.data for legacy accessor functions
        data = self.hass.data
        addons_list_dicts = [addon.to_dict() for addon in installed_addons]
        data[DATA_ADDONS_LIST] = addons_list_dicts

        # Update addon info cache in hass.data
        addon_info_cache: dict[str, Any] = data.setdefault(DATA_ADDONS_INFO, {})
        for slug in addon_info_cache.keys() - all_addons:
            del addon_info_cache[slug]
        addon_info_cache.update(addon_info_results)

        # Deprecated 2026.4.0: Folding addons.list results into supervisor_info
        # for compatibility. Written to hass.data only, not coordinator data.
        if DATA_SUPERVISOR_INFO in data:
            data[DATA_SUPERVISOR_INFO]["addons"] = addons_list_dicts

        # Build clean coordinator data
        store_data = get_store(self.hass)
        if store_data:
            repositories = {
                repo.slug: repo.name
                for repo in StoreInfo.from_dict(store_data).repositories
            }
        else:
            repositories = {}

        new_data: dict[str, Any] = {}
        new_data[DATA_KEY_ADDONS] = {
            (slug := addon[ATTR_SLUG]): {
                **addon,
                ATTR_AUTO_UPDATE: (addon_info_cache.get(slug) or {}).get(
                    ATTR_AUTO_UPDATE, False
                ),
                ATTR_REPOSITORY: repositories.get(
                    repo_slug := addon.get(ATTR_REPOSITORY, ""), repo_slug
                ),
            }
            for addon in addons_list_dicts
        }

        # If this is the initial refresh, register all addons
        if is_first_update:
            async_register_addons_in_dev_reg(
                self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
            )

        # Remove add-ons that are no longer installed from device registry
        supervisor_addon_devices = {
            list(device.identifiers)[0][1]
            for device in self.dev_reg.devices.get_devices_for_config_entry_id(
                self.entry_id
            )
            if device.model == SupervisorEntityModel.ADDON
        }
        if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
            async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)

        # If there are new add-ons, we should reload the config entry so we can
        # create new devices and entities. We can return an empty dict because
        # coordinator will be recreated.
        if self.data and (
            set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
        ):
            self.hass.async_create_task(
                self.hass.config_entries.async_reload(self.entry_id)
            )
            return {}

        return new_data

    async def get_changelog(self, addon_slug: str) -> str | None:
        """Get the changelog for an add-on."""
        try:
            return await self.supervisor_client.store.addon_changelog(addon_slug)
        except SupervisorNotFoundError:
            return None

    async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
        """Return the info for an addon."""
        try:
            info = await self.supervisor_client.addons.addon_info(slug)
        except SupervisorError as err:
            _LOGGER.warning("Could not fetch info for %s: %s", slug, err)
            return (slug, None)
        # Translate to legacy hassio names for compatibility
        info_dict = info.to_dict()
        info_dict["hassio_api"] = info_dict.pop("supervisor_api")
        info_dict["hassio_role"] = info_dict.pop("supervisor_role")
        return (slug, info_dict)

    @callback
    def async_enable_addon_info_updates(
        self, slug: str, entity_id: str
    ) -> CALLBACK_TYPE:
        """Enable info updates for an add-on."""
        self._addon_info_subscriptions[slug].add(entity_id)

        @callback
        def _remove() -> None:
            self._addon_info_subscriptions[slug].discard(entity_id)
            if not self._addon_info_subscriptions[slug]:
                del self._addon_info_subscriptions[slug]

        return _remove

    async def _async_refresh(
        self,
        log_failures: bool = True,
        raise_on_auth_failed: bool = False,
        scheduled: bool = False,
        raise_on_entry_error: bool = False,
    ) -> None:
        """Refresh data."""
        if not scheduled and not raise_on_auth_failed:
            # Force reloading add-on updates for non-scheduled
            # updates.
            #
            # If `raise_on_auth_failed` is set, it means this is
            # the first refresh and we do not want to delay
            # startup or cause a timeout so we only refresh the
            # updates if this is not a scheduled refresh and
            # we are not doing the first refresh.
            try:
                await self.supervisor_client.store.reload()
            except SupervisorError as err:
                _LOGGER.warning("Error on Supervisor API: %s", err)

        await super()._async_refresh(
            log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
        )

    async def force_addon_info_data_refresh(self, addon_slug: str) -> None:
        """Force refresh of addon info data for a specific addon."""
        try:
            slug, info = await self._update_addon_info(addon_slug)
            if info is not None and DATA_KEY_ADDONS in self.data:
                if slug in self.data[DATA_KEY_ADDONS]:
                    data = deepcopy(self.data)
                    data[DATA_KEY_ADDONS][slug].update(info)
                    self.async_set_updated_data(data)
        except SupervisorError as err:
            _LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)


class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
class HassioDataUpdateCoordinator(DataUpdateCoordinator):
    """Class to retrieve Hass.io status."""

    config_entry: ConfigEntry
@@ -639,77 +332,82 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=HASSIO_MAIN_UPDATE_INTERVAL,
            update_interval=HASSIO_UPDATE_INTERVAL,
            # We don't want an immediate refresh since we want to avoid
            # hammering the Supervisor API on startup
            # fetching the container stats right away and avoid hammering
            # the Supervisor API on startup
            request_refresh_debouncer=Debouncer(
                hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
            ),
        )
        self.hassio = hass.data[DATA_COMPONENT]
        self.data = {}
        self.entry_id = config_entry.entry_id
        self.dev_reg = dev_reg
        self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
        self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
            lambda: defaultdict(set)
        )
        self.supervisor_client = get_supervisor_client(hass)
        self.jobs = SupervisorJobs(hass)

    async def _async_update_data(self) -> dict[str, Any]:
        """Update data via library."""
        is_first_update = not self.data
        client = self.supervisor_client

        try:
            (
                info,
                core_info,
                supervisor_info,
                os_info,
                host_info,
                store_info,
                network_info,
            ) = await asyncio.gather(
                client.info(),
                client.homeassistant.info(),
                client.supervisor.info(),
                client.os.info(),
                client.host.info(),
                client.store.info(),
                client.network.info(),
            )
            mounts_info = await client.mounts.info()
            await self.jobs.refresh_data(is_first_update)
            await self.force_data_refresh(is_first_update)
        except SupervisorError as err:
            raise UpdateFailed(f"Error on Supervisor API: {err}") from err

        # Build clean coordinator data
        new_data: dict[str, Any] = {}
        new_data[DATA_KEY_CORE] = core_info.to_dict()
        new_data[DATA_KEY_SUPERVISOR] = supervisor_info.to_dict()
        new_data[DATA_KEY_HOST] = host_info.to_dict()
        new_data[DATA_KEY_MOUNTS] = {mount.name: mount for mount in mounts_info.mounts}
        supervisor_info = get_supervisor_info(self.hass) or {}
        addons_info = get_addons_info(self.hass) or {}
        addons_stats = get_addons_stats(self.hass)
        store_data = get_store(self.hass)
        mounts_info = await self.supervisor_client.mounts.info()
        addons_list = get_addons_list(self.hass) or []

        if store_data:
            repositories = {
                repo.slug: repo.name
                for repo in StoreInfo.from_dict(store_data).repositories
            }
        else:
            repositories = {}

        new_data[DATA_KEY_ADDONS] = {
            (slug := addon[ATTR_SLUG]): {
                **addon,
                **(addons_stats.get(slug) or {}),
                ATTR_AUTO_UPDATE: (addons_info.get(slug) or {}).get(
                    ATTR_AUTO_UPDATE, False
                ),
                ATTR_REPOSITORY: repositories.get(
                    repo_slug := addon.get(ATTR_REPOSITORY, ""), repo_slug
                ),
            }
            for addon in addons_list
        }
        if self.is_hass_os:
            new_data[DATA_KEY_OS] = os_info.to_dict()
            new_data[DATA_KEY_OS] = get_os_info(self.hass)

        # Update hass.data for legacy accessor functions
        data = self.hass.data
        data[DATA_INFO] = info.to_dict()
        data[DATA_CORE_INFO] = new_data[DATA_KEY_CORE]
        data[DATA_OS_INFO] = new_data.get(DATA_KEY_OS, os_info.to_dict())
        data[DATA_HOST_INFO] = new_data[DATA_KEY_HOST]
        data[DATA_STORE] = store_info.to_dict()
        data[DATA_NETWORK_INFO] = network_info.to_dict()
        # Separate dict for hass.data supervisor info since we add deprecated
        # compat keys that should not be in coordinator data
        supervisor_info_dict = supervisor_info.to_dict()
        # Deprecated 2026.4.0: Folding repositories and addons into
        # supervisor_info for compatibility. Written to hass.data only, not
        # coordinator data. Preserve the addons key from the addon coordinator.
        supervisor_info_dict["repositories"] = data[DATA_STORE][ATTR_REPOSITORIES]
        if (prev := data.get(DATA_SUPERVISOR_INFO)) and "addons" in prev:
            supervisor_info_dict["addons"] = prev["addons"]
        data[DATA_SUPERVISOR_INFO] = supervisor_info_dict
        new_data[DATA_KEY_CORE] = {
            **(get_core_info(self.hass) or {}),
            **get_core_stats(self.hass),
        }
        new_data[DATA_KEY_SUPERVISOR] = {
            **supervisor_info,
            **get_supervisor_stats(self.hass),
        }
        new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
        new_data[DATA_KEY_MOUNTS] = {mount.name: mount for mount in mounts_info.mounts}

        # If this is the initial refresh, register all main components
        # If this is the initial refresh, register all addons and return the dict
        if is_first_update:
            async_register_addons_in_dev_reg(
                self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
            )
            async_register_mounts_in_dev_reg(
                self.entry_id, self.dev_reg, new_data[DATA_KEY_MOUNTS].values()
            )
@@ -725,6 +423,17 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
            )

        # Remove add-ons that are no longer installed from device registry
        supervisor_addon_devices = {
            list(device.identifiers)[0][1]
            for device in self.dev_reg.devices.get_devices_for_config_entry_id(
                self.entry_id
            )
            if device.model == SupervisorEntityModel.ADDON
        }
        if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
            async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)

        # Remove mounts that no longer exists from device registry
        supervisor_mount_devices = {
            device.name
@@ -744,11 +453,12 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
            # Remove the OS device if it exists and the installation is not hassos
            self.dev_reg.async_remove_device(dev.id)

        # If there are new mounts, we should reload the config entry so we can
        # If there are new add-ons or mounts, we should reload the config entry so we can
        # create new devices and entities. We can return an empty dict because
        # coordinator will be recreated.
        if self.data and (
            set(new_data[DATA_KEY_MOUNTS]) - set(self.data.get(DATA_KEY_MOUNTS, {}))
            set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
            or set(new_data[DATA_KEY_MOUNTS]) - set(self.data[DATA_KEY_MOUNTS])
        ):
            self.hass.async_create_task(
                self.hass.config_entries.async_reload(self.entry_id)
@@ -757,6 +467,146 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):

        return new_data

    async def get_changelog(self, addon_slug: str) -> str | None:
        """Get the changelog for an add-on."""
        try:
            return await self.supervisor_client.store.addon_changelog(addon_slug)
        except SupervisorNotFoundError:
            return None

    async def force_data_refresh(self, first_update: bool) -> None:
        """Force update of the addon info."""
        container_updates = self._container_updates

        data = self.hass.data
        client = self.supervisor_client

        updates: dict[str, Awaitable[ResponseData]] = {
            DATA_INFO: client.info(),
            DATA_CORE_INFO: client.homeassistant.info(),
            DATA_SUPERVISOR_INFO: client.supervisor.info(),
            DATA_OS_INFO: client.os.info(),
            DATA_STORE: client.store.info(),
        }
        if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
            updates[DATA_CORE_STATS] = client.homeassistant.stats()
        if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
            updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats()

        # Pull off addons.list results for further processing before caching
        addons_list, *results = await asyncio.gather(
            client.addons.list(), *updates.values()
        )
        for key, result in zip(updates, cast(list[ResponseData], results), strict=True):
            data[key] = result.to_dict()

        installed_addons = cast(list[InstalledAddon], addons_list)
        data[DATA_ADDONS_LIST] = [addon.to_dict() for addon in installed_addons]

        # Deprecated 2026.4.0: Folding repositories and addons.list results into supervisor_info for compatibility
        # Can drop this after removal period
        data[DATA_SUPERVISOR_INFO].update(
            {
                "repositories": data[DATA_STORE][ATTR_REPOSITORIES],
                "addons": [addon.to_dict() for addon in installed_addons],
            }
        )

        all_addons = {addon.slug for addon in installed_addons}
        started_addons = {
            addon.slug
            for addon in installed_addons
            if addon.state in {AddonState.STARTED, AddonState.STARTUP}
        }

        #
        # Update addon info if its the first update or
        # there is at least one entity that needs the data.
        #
        # When entities are added they call async_enable_container_updates
        # to enable updates for the endpoints they need via
        # async_added_to_hass. This ensures that we only update
        # the data for the endpoints that are needed to avoid unnecessary
        # API calls since otherwise we would fetch stats for all containers
        # and throw them away.
        #
        for data_key, update_func, enabled_key, wanted_addons, needs_first_update in (
            (
                DATA_ADDONS_STATS,
                self._update_addon_stats,
                CONTAINER_STATS,
                started_addons,
                False,
            ),
            (
                DATA_ADDONS_INFO,
                self._update_addon_info,
                CONTAINER_INFO,
                all_addons,
                True,
            ),
        ):
            container_data: dict[str, Any] = data.setdefault(data_key, {})

            # Clean up cache
            for slug in container_data.keys() - wanted_addons:
                del container_data[slug]

            # Update cache from API
            container_data.update(
                dict(
                    await asyncio.gather(
                        *[
                            update_func(slug)
                            for slug in wanted_addons
                            if (first_update and needs_first_update)
                            or enabled_key in container_updates[slug]
                        ]
                    )
                )
            )

        # Refresh jobs data
        await self.jobs.refresh_data(first_update)

    async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
        """Update single addon stats."""
        try:
            stats = await self.supervisor_client.addons.addon_stats(slug)
        except SupervisorError as err:
            _LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
            return (slug, None)
        return (slug, stats.to_dict())

    async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
        """Return the info for an addon."""
        try:
            info = await self.supervisor_client.addons.addon_info(slug)
        except SupervisorError as err:
            _LOGGER.warning("Could not fetch info for %s: %s", slug, err)
            return (slug, None)
        # Translate to legacy hassio names for compatibility
        info_dict = info.to_dict()
        info_dict["hassio_api"] = info_dict.pop("supervisor_api")
        info_dict["hassio_role"] = info_dict.pop("supervisor_role")
        return (slug, info_dict)

    @callback
    def async_enable_container_updates(
        self, slug: str, entity_id: str, types: set[str]
    ) -> CALLBACK_TYPE:
        """Enable updates for an add-on."""
        enabled_updates = self._container_updates[slug]
        for key in types:
            enabled_updates[key].add(entity_id)

        @callback
        def _remove() -> None:
            for key in types:
                enabled_updates[key].remove(entity_id)

        return _remove

    async def _async_refresh(
        self,
        log_failures: bool = True,
@@ -766,16 +616,14 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    ) -> None:
        """Refresh data."""
        if not scheduled and not raise_on_auth_failed:
            # Force reloading updates of main components for
            # non-scheduled updates.
            #
            # Force refreshing updates for non-scheduled updates
            # If `raise_on_auth_failed` is set, it means this is
            # the first refresh and we do not want to delay
            # startup or cause a timeout so we only refresh the
            # updates if this is not a scheduled refresh and
            # we are not doing the first refresh.
            try:
                await self.supervisor_client.reload_updates()
                await self.supervisor_client.refresh_updates()
            except SupervisorError as err:
                _LOGGER.warning("Error on Supervisor API: %s", err)

@@ -783,6 +631,18 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
            log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
        )

    async def force_addon_info_data_refresh(self, addon_slug: str) -> None:
        """Force refresh of addon info data for a specific addon."""
        try:
            slug, info = await self._update_addon_info(addon_slug)
            if info is not None and DATA_KEY_ADDONS in self.data:
                if slug in self.data[DATA_KEY_ADDONS]:
                    data = deepcopy(self.data)
                    data[DATA_KEY_ADDONS][slug].update(info)
                    self.async_set_updated_data(data)
        except SupervisorError as err:
            _LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)

    @callback
    def unload(self) -> None:
        """Clean up when config entry unloaded."""

@@ -11,12 +11,8 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er

from .const import ADDONS_COORDINATOR, MAIN_COORDINATOR, STATS_COORDINATOR
from .coordinator import (
    HassioAddOnDataUpdateCoordinator,
    HassioMainDataUpdateCoordinator,
    HassioStatsDataUpdateCoordinator,
)
from .const import ADDONS_COORDINATOR
from .coordinator import HassioDataUpdateCoordinator


async def async_get_config_entry_diagnostics(
@@ -24,9 +20,7 @@ async def async_get_config_entry_diagnostics(
|
||||
config_entry: ConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator: HassioMainDataUpdateCoordinator = hass.data[MAIN_COORDINATOR]
|
||||
addons_coordinator: HassioAddOnDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
|
||||
stats_coordinator: HassioStatsDataUpdateCoordinator = hass.data[STATS_COORDINATOR]
|
||||
coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
|
||||
device_registry = dr.async_get(hass)
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
@@ -59,7 +53,5 @@ async def async_get_config_entry_diagnostics(
|
||||
|
||||
return {
|
||||
"coordinator_data": coordinator.data,
|
||||
"addons_coordinator_data": addons_coordinator.data,
|
||||
"stats_coordinator_data": stats_coordinator.data,
|
||||
"devices": devices,
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
    ATTR_SLUG,
    CONTAINER_STATS,
    CORE_CONTAINER,
    DATA_KEY_ADDONS,
    DATA_KEY_CORE,
    DATA_KEY_HOST,
@@ -20,79 +21,20 @@ from .const import (
    DATA_KEY_OS,
    DATA_KEY_SUPERVISOR,
    DOMAIN,
    KEY_TO_UPDATE_TYPES,
    SUPERVISOR_CONTAINER,
)
from .coordinator import (
    HassioAddOnDataUpdateCoordinator,
    HassioMainDataUpdateCoordinator,
    HassioStatsDataUpdateCoordinator,
)
from .coordinator import HassioDataUpdateCoordinator


class HassioStatsEntity(CoordinatorEntity[HassioStatsDataUpdateCoordinator]):
    """Base entity for container stats (CPU, memory)."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: HassioStatsDataUpdateCoordinator,
        entity_description: EntityDescription,
        *,
        container_id: str,
        data_key: str,
        device_id: str,
        unique_id_prefix: str,
    ) -> None:
        """Initialize base entity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._container_id = container_id
        self._data_key = data_key
        self._attr_unique_id = f"{unique_id_prefix}_{entity_description.key}"
        self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_id)})

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        if self._data_key == DATA_KEY_ADDONS:
            return (
                super().available
                and DATA_KEY_ADDONS in self.coordinator.data
                and self.entity_description.key
                in (
                    self.coordinator.data[DATA_KEY_ADDONS].get(self._container_id) or {}
                )
            )
        return (
            super().available
            and self._data_key in self.coordinator.data
            and self.entity_description.key in self.coordinator.data[self._data_key]
        )

    async def async_added_to_hass(self) -> None:
        """Subscribe to stats updates."""
        await super().async_added_to_hass()
        self.async_on_remove(
            self.coordinator.async_enable_container_updates(
                self._container_id, self.entity_id, {CONTAINER_STATS}
            )
        )
        # Stats are only fetched for containers with subscribed entities.
        # The first coordinator refresh (before entities exist) has no
        # subscribers, so no stats are fetched. Schedule a debounced
        # refresh so that all stats entities registering during platform
        # setup are batched into a single API call.
        await self.coordinator.async_request_refresh()


class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
    """Base entity for a Hass.io add-on."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: HassioAddOnDataUpdateCoordinator,
        coordinator: HassioDataUpdateCoordinator,
        entity_description: EntityDescription,
        addon: dict[str, Any],
    ) -> None:
@@ -114,23 +56,26 @@ class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
        )

    async def async_added_to_hass(self) -> None:
        """Subscribe to addon info updates."""
        """Subscribe to updates."""
        await super().async_added_to_hass()
        update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
        self.async_on_remove(
            self.coordinator.async_enable_addon_info_updates(
                self._addon_slug, self.entity_id
            self.coordinator.async_enable_container_updates(
                self._addon_slug, self.entity_id, update_types
            )
        )
        if CONTAINER_STATS in update_types:
            await self.coordinator.async_request_refresh()


class HassioOSEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
class HassioOSEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
    """Base Entity for Hass.io OS."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: HassioMainDataUpdateCoordinator,
        coordinator: HassioDataUpdateCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize base entity."""
@@ -149,14 +94,14 @@ class HassioOSEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
        )


class HassioHostEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
class HassioHostEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
    """Base Entity for Hass.io host."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: HassioMainDataUpdateCoordinator,
        coordinator: HassioDataUpdateCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize base entity."""
@@ -175,14 +120,14 @@ class HassioHostEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
        )


class HassioSupervisorEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
class HassioSupervisorEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
    """Base Entity for Supervisor."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: HassioMainDataUpdateCoordinator,
        coordinator: HassioDataUpdateCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize base entity."""
@@ -201,15 +146,27 @@ class HassioSupervisorEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator])
            in self.coordinator.data[DATA_KEY_SUPERVISOR]
        )

    async def async_added_to_hass(self) -> None:
        """Subscribe to updates."""
        await super().async_added_to_hass()
        update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
        self.async_on_remove(
            self.coordinator.async_enable_container_updates(
                SUPERVISOR_CONTAINER, self.entity_id, update_types
            )
        )
        if CONTAINER_STATS in update_types:
            await self.coordinator.async_request_refresh()

class HassioCoreEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):

class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
    """Base Entity for Core."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: HassioMainDataUpdateCoordinator,
        coordinator: HassioDataUpdateCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize base entity."""
@@ -227,15 +184,27 @@ class HassioCoreEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
            and self.entity_description.key in self.coordinator.data[DATA_KEY_CORE]
        )

    async def async_added_to_hass(self) -> None:
        """Subscribe to updates."""
        await super().async_added_to_hass()
        update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
        self.async_on_remove(
            self.coordinator.async_enable_container_updates(
                CORE_CONTAINER, self.entity_id, update_types
            )
        )
        if CONTAINER_STATS in update_types:
            await self.coordinator.async_request_refresh()

class HassioMountEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):

class HassioMountEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
    """Base Entity for Mount."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: HassioMainDataUpdateCoordinator,
        coordinator: HassioDataUpdateCoordinator,
        entity_description: EntityDescription,
        mount: CIFSMountResponse | NFSMountResponse,
    ) -> None:

@@ -28,6 +28,7 @@ from homeassistant.helpers.issue_registry import (
)

from .const import (
    ADDONS_COORDINATOR,
    ATTR_DATA,
    ATTR_HEALTHY,
    ATTR_SLUG,
@@ -53,7 +54,6 @@ from .const import (
    ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
    ISSUE_KEY_SYSTEM_FREE_SPACE,
    ISSUE_MOUNT_MOUNT_FAILED,
    MAIN_COORDINATOR,
    PLACEHOLDER_KEY_ADDON,
    PLACEHOLDER_KEY_ADDON_URL,
    PLACEHOLDER_KEY_FREE_SPACE,
@@ -62,7 +62,7 @@ from .const import (
    STARTUP_COMPLETE,
    UPDATE_KEY_SUPERVISOR,
)
from .coordinator import HassioMainDataUpdateCoordinator, get_addons_list, get_host_info
from .coordinator import HassioDataUpdateCoordinator, get_addons_list, get_host_info
from .handler import get_supervisor_client

ISSUE_KEY_UNHEALTHY = "unhealthy"
@@ -417,8 +417,8 @@ class SupervisorIssues:

    def _async_coordinator_refresh(self) -> None:
        """Refresh coordinator to update latest data in entities."""
        coordinator: HassioMainDataUpdateCoordinator | None
        if coordinator := self._hass.data.get(MAIN_COORDINATOR):
        coordinator: HassioDataUpdateCoordinator | None
        if coordinator := self._hass.data.get(ADDONS_COORDINATOR):
            coordinator.config_entry.async_create_task(
                self._hass, coordinator.async_refresh()
            )

@@ -17,24 +17,20 @@ from .const import (
    ADDONS_COORDINATOR,
    ATTR_CPU_PERCENT,
    ATTR_MEMORY_PERCENT,
    ATTR_SLUG,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    CORE_CONTAINER,
    DATA_KEY_ADDONS,
    DATA_KEY_CORE,
    DATA_KEY_HOST,
    DATA_KEY_OS,
    DATA_KEY_SUPERVISOR,
    MAIN_COORDINATOR,
    STATS_COORDINATOR,
    SUPERVISOR_CONTAINER,
)
from .entity import (
    HassioAddonEntity,
    HassioCoreEntity,
    HassioHostEntity,
    HassioOSEntity,
    HassioStatsEntity,
    HassioSupervisorEntity,
)

COMMON_ENTITY_DESCRIPTIONS = (
@@ -67,7 +63,10 @@ STATS_ENTITY_DESCRIPTIONS = (
    ),
)

ADDON_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS + STATS_ENTITY_DESCRIPTIONS
CORE_ENTITY_DESCRIPTIONS = STATS_ENTITY_DESCRIPTIONS
OS_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS
SUPERVISOR_ENTITY_DESCRIPTIONS = STATS_ENTITY_DESCRIPTIONS

HOST_ENTITY_DESCRIPTIONS = (
    SensorEntityDescription(
@@ -115,64 +114,36 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Sensor set up for Hass.io config entry."""
    addons_coordinator = hass.data[ADDONS_COORDINATOR]
    coordinator = hass.data[MAIN_COORDINATOR]
    stats_coordinator = hass.data[STATS_COORDINATOR]
    coordinator = hass.data[ADDONS_COORDINATOR]

    entities: list[SensorEntity] = []

    # Add-on non-stats sensors (version, version_latest)
    entities.extend(
    entities: list[
        HassioOSSensor | HassioAddonSensor | CoreSensor | SupervisorSensor | HostSensor
    ] = [
        HassioAddonSensor(
            addon=addon,
            coordinator=addons_coordinator,
            coordinator=coordinator,
            entity_description=entity_description,
        )
        for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
        for entity_description in COMMON_ENTITY_DESCRIPTIONS
    )
        for addon in coordinator.data[DATA_KEY_ADDONS].values()
        for entity_description in ADDON_ENTITY_DESCRIPTIONS
    ]

    # Add-on stats sensors (cpu_percent, memory_percent)
    entities.extend(
        HassioStatsSensor(
            coordinator=stats_coordinator,
        CoreSensor(
            coordinator=coordinator,
            entity_description=entity_description,
            container_id=addon[ATTR_SLUG],
            data_key=DATA_KEY_ADDONS,
            device_id=addon[ATTR_SLUG],
            unique_id_prefix=addon[ATTR_SLUG],
        )
        for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
        for entity_description in STATS_ENTITY_DESCRIPTIONS
        for entity_description in CORE_ENTITY_DESCRIPTIONS
    )

    # Core stats sensors
    entities.extend(
        HassioStatsSensor(
            coordinator=stats_coordinator,
        SupervisorSensor(
            coordinator=coordinator,
            entity_description=entity_description,
            container_id=CORE_CONTAINER,
            data_key=DATA_KEY_CORE,
            device_id="core",
            unique_id_prefix="home_assistant_core",
        )
        for entity_description in STATS_ENTITY_DESCRIPTIONS
        for entity_description in SUPERVISOR_ENTITY_DESCRIPTIONS
    )

    # Supervisor stats sensors
    entities.extend(
        HassioStatsSensor(
            coordinator=stats_coordinator,
            entity_description=entity_description,
            container_id=SUPERVISOR_CONTAINER,
            data_key=DATA_KEY_SUPERVISOR,
            device_id="supervisor",
            unique_id_prefix="home_assistant_supervisor",
        )
        for entity_description in STATS_ENTITY_DESCRIPTIONS
    )

    # Host sensors
    entities.extend(
        HostSensor(
            coordinator=coordinator,
@@ -181,7 +152,6 @@ async def async_setup_entry(
        for entity_description in HOST_ENTITY_DESCRIPTIONS
    )

    # OS sensors
    if coordinator.is_hass_os:
        entities.extend(
            HassioOSSensor(
@@ -205,21 +175,8 @@ class HassioAddonSensor(HassioAddonEntity, SensorEntity):
        ]


class HassioStatsSensor(HassioStatsEntity, SensorEntity):
    """Sensor to track container stats."""

    @property
    def native_value(self) -> str:
        """Return native value of entity."""
        if self._data_key == DATA_KEY_ADDONS:
            return self.coordinator.data[DATA_KEY_ADDONS][self._container_id][
                self.entity_description.key
            ]
        return self.coordinator.data[self._data_key][self.entity_description.key]


class HassioOSSensor(HassioOSEntity, SensorEntity):
    """Sensor to track a Hass.io OS attribute."""
    """Sensor to track a Hass.io add-on attribute."""

    @property
    def native_value(self) -> str:
@@ -227,6 +184,24 @@ class HassioOSSensor(HassioOSEntity, SensorEntity):
        return self.coordinator.data[DATA_KEY_OS][self.entity_description.key]


class CoreSensor(HassioCoreEntity, SensorEntity):
    """Sensor to track a core attribute."""

    @property
    def native_value(self) -> str:
        """Return native value of entity."""
        return self.coordinator.data[DATA_KEY_CORE][self.entity_description.key]


class SupervisorSensor(HassioSupervisorEntity, SensorEntity):
    """Sensor to track a supervisor attribute."""

    @property
    def native_value(self) -> str:
        """Return native value of entity."""
        return self.coordinator.data[DATA_KEY_SUPERVISOR][self.entity_description.key]


class HostSensor(HassioHostEntity, SensorEntity):
    """Sensor to track a host attribute."""


@@ -32,6 +32,7 @@ from homeassistant.helpers import (
from homeassistant.util.dt import now

from .const import (
    ADDONS_COORDINATOR,
    ATTR_ADDON,
    ATTR_ADDONS,
    ATTR_APP,
@@ -45,10 +46,9 @@ from .const import (
    ATTR_PASSWORD,
    ATTR_SLUG,
    DOMAIN,
    MAIN_COORDINATOR,
    SupervisorEntityModel,
)
from .coordinator import HassioMainDataUpdateCoordinator, get_addons_info
from .coordinator import HassioDataUpdateCoordinator, get_addons_info

SERVICE_ADDON_START = "addon_start"
SERVICE_ADDON_STOP = "addon_stop"
@@ -406,7 +406,7 @@ def async_register_network_storage_services(

    async def async_mount_reload(service: ServiceCall) -> None:
        """Handle service calls for Hass.io."""
        coordinator: HassioMainDataUpdateCoordinator | None = None
        coordinator: HassioDataUpdateCoordinator | None = None

        if (device := dev_reg.async_get(service.data[ATTR_DEVICE_ID])) is None:
            raise ServiceValidationError(
@@ -417,7 +417,7 @@ def async_register_network_storage_services(
        if (
            device.name is None
            or device.model != SupervisorEntityModel.MOUNT
            or (coordinator := hass.data.get(MAIN_COORDINATOR)) is None
            or (coordinator := hass.data.get(ADDONS_COORDINATOR)) is None
            or coordinator.entry_id not in device.config_entries
        ):
            raise ServiceValidationError(

@@ -29,7 +29,6 @@ from .const import (
    DATA_KEY_CORE,
    DATA_KEY_OS,
    DATA_KEY_SUPERVISOR,
    MAIN_COORDINATOR,
)
from .entity import (
    HassioAddonEntity,
@@ -52,9 +51,9 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Supervisor update based on a config entry."""
    coordinator = hass.data[MAIN_COORDINATOR]
    coordinator = hass.data[ADDONS_COORDINATOR]

    entities: list[UpdateEntity] = [
    entities = [
        SupervisorSupervisorUpdateEntity(
            coordinator=coordinator,
            entity_description=ENTITY_DESCRIPTION,
@@ -65,6 +64,15 @@ async def async_setup_entry(
        ),
    ]

    entities.extend(
        SupervisorAddonUpdateEntity(
            addon=addon,
            coordinator=coordinator,
            entity_description=ENTITY_DESCRIPTION,
        )
        for addon in coordinator.data[DATA_KEY_ADDONS].values()
    )

    if coordinator.is_hass_os:
        entities.append(
            SupervisorOSUpdateEntity(
@@ -73,16 +81,6 @@ async def async_setup_entry(
            )
        )

    addons_coordinator = hass.data[ADDONS_COORDINATOR]
    entities.extend(
        SupervisorAddonUpdateEntity(
            addon=addon,
            coordinator=addons_coordinator,
            entity_description=ENTITY_DESCRIPTION,
        )
        for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
    )

    async_add_entities(entities)


@@ -148,7 +148,7 @@
      },
      "step": {
        "init": {
          "description": "The integration `{domain}` could not be found. This happens when a (custom) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
          "description": "The integration `{domain}` could not be found. This happens when a (community) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
          "menu_options": {
            "confirm": "Remove previous configurations",
            "ignore": "Ignore"
@@ -236,7 +236,7 @@
      "description": "Restarts Home Assistant.",
      "fields": {
        "safe_mode": {
          "description": "Disable custom integrations and custom cards.",
          "description": "Disable community integrations and community cards.",
          "name": "Safe mode"
        }
      },

@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
  "integration_type": "system",
  "requirements": [
    "serialx==1.2.2",
    "serialx==1.1.1",
    "universal-silabs-flasher==1.0.3",
    "ha-silabs-firmware-client==0.3.0"
  ]

@@ -625,13 +625,10 @@ def _get_test_socket() -> socket.socket:
@callback
def async_port_is_available(port: int) -> bool:
    """Check to see if a port is available."""
    test_socket = _get_test_socket()
    try:
        test_socket.bind(("", port))
        _get_test_socket().bind(("", port))
    except OSError:
        return False
    finally:
        test_socket.close()
    return True


@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["homematicip"],
  "requirements": ["homematicip==2.8.0"]
  "requirements": ["homematicip==2.7.0"]
}

@@ -42,6 +42,7 @@ class HassAqualinkBinarySensor(
    ) -> None:
        """Initialize AquaLink binary sensor."""
        super().__init__(coordinator, dev)
        self._attr_name = dev.label
        if dev.label == "Freeze Protection":
            self._attr_device_class = BinarySensorDeviceClass.COLD


@@ -57,6 +57,7 @@ class HassAqualinkThermostat(AqualinkEntity[AqualinkThermostat], ClimateEntity):
    ) -> None:
        """Initialize AquaLink thermostat."""
        super().__init__(coordinator, dev)
        self._attr_name = dev.label.split(" ")[0]
        self._attr_temperature_unit = (
            UnitOfTemperature.FAHRENHEIT
            if dev.unit == "F"

@@ -22,9 +22,6 @@ class AqualinkEntity[AqualinkDeviceT: AqualinkDevice](
    entity update flow.
    """

    _attr_has_entity_name = True
    _attr_name = None

    def __init__(
        self, coordinator: AqualinkDataUpdateCoordinator, dev: AqualinkDeviceT
    ) -> None:

@@ -46,6 +46,7 @@ class HassAqualinkLight(AqualinkEntity[AqualinkLight], LightEntity):
    ) -> None:
        """Initialize AquaLink light."""
        super().__init__(coordinator, dev)
        self._attr_name = dev.label
        if dev.supports_effect:
            self._attr_effect_list = list(dev.supported_effects)
            self._attr_supported_features = LightEntityFeature.EFFECT

@@ -38,6 +38,7 @@ class HassAqualinkSensor(AqualinkEntity[AqualinkSensor], SensorEntity):
    ) -> None:
        """Initialize AquaLink sensor."""
        super().__init__(coordinator, dev)
        self._attr_name = dev.label
        if not dev.name.endswith("_temp"):
            return
        self._attr_device_class = SensorDeviceClass.TEMPERATURE

@@ -40,7 +40,7 @@ class HassAqualinkSwitch(AqualinkEntity[AqualinkSwitch], SwitchEntity):
    ) -> None:
        """Initialize AquaLink switch."""
        super().__init__(coordinator, dev)
        name = dev.label
        name = self._attr_name = dev.label
        if name == "Cleaner":
            self._attr_icon = "mdi:robot-vacuum"
        elif name == "Waterfall" or name.endswith("Dscnt"):

@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/image_upload",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["Pillow==12.2.0"]
  "requirements": ["Pillow==12.1.1"]
}

@@ -127,12 +127,6 @@ CONFIG_SCHEMA = vol.Schema(
)


def _read_image_size(image_path: str) -> tuple[int, int]:
    """Open image to determine image size."""
    with Image.open(image_path) as image:
        return image.size


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Matrix bot component."""
    config = config[DOMAIN]
@@ -510,9 +504,8 @@ class MatrixBot:
            return

        # Get required image metadata.
        (width, height) = await self.hass.async_add_executor_job(
            _read_image_size, image_path
        )
        image = await self.hass.async_add_executor_job(Image.open, image_path)
        (width, height) = image.size
        mime_type = mimetypes.guess_type(image_path)[0]
        file_stat = await aiofiles.os.stat(image_path)


@@ -6,5 +6,5 @@
  "iot_class": "cloud_push",
  "loggers": ["matrix_client"],
  "quality_scale": "legacy",
  "requirements": ["matrix-nio==0.25.2", "Pillow==12.2.0", "aiofiles==24.1.0"]
  "requirements": ["matrix-nio==0.25.2", "Pillow==12.1.1", "aiofiles==24.1.0"]
}

@@ -24,8 +24,6 @@ See https://modelcontextprotocol.io/docs/concepts/transports
"""

import asyncio
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from dataclasses import dataclass
from http import HTTPStatus
import logging
@@ -104,29 +102,17 @@ class Streams:
    write_stream: MemoryObjectSendStream[SessionMessage]
    write_stream_reader: MemoryObjectReceiveStream[SessionMessage]

    async def aclose(self) -> None:
        """Close open memory streams."""
        await self.read_stream.aclose()
        await self.read_stream_writer.aclose()
        await self.write_stream.aclose()
        await self.write_stream_reader.aclose()


@asynccontextmanager
async def create_streams() -> AsyncGenerator[Streams]:
def create_streams() -> Streams:
    """Create a new pair of streams for MCP server communication."""
    read_stream_writer, read_stream = anyio.create_memory_object_stream(0)
    write_stream, write_stream_reader = anyio.create_memory_object_stream(0)
    streams = Streams(
    return Streams(
        read_stream=read_stream,
        read_stream_writer=read_stream_writer,
        write_stream=write_stream,
        write_stream_reader=write_stream_reader,
    )
    try:
        yield streams
    finally:
        await streams.aclose()


async def create_mcp_server(
@@ -169,9 +155,9 @@ class ModelContextProtocolSSEView(HomeAssistantView):
        session_manager = entry.runtime_data

        server, options = await create_mcp_server(hass, self.context(request), entry)
        streams = create_streams()

        async with (
            create_streams() as streams,
            sse_response(request) as response,
            session_manager.create(Session(streams.read_stream_writer)) as session_id,
        ):
@@ -275,24 +261,21 @@ class ModelContextProtocolStreamableView(HomeAssistantView):
        # request is sent to the MCP server and we wait for a single response
        # then shut down the server.
        server, options = await create_mcp_server(hass, self.context(request), entry)
        streams = create_streams()

        async with create_streams() as streams:

            async def run_server() -> None:
                await server.run(
                    streams.read_stream, streams.write_stream, options, stateless=True
                )

            async with asyncio.timeout(TIMEOUT), anyio.create_task_group() as tg:
                tg.start_soon(run_server)

                await streams.read_stream_writer.send(SessionMessage(message))
                session_message = await anext(streams.write_stream_reader)
                tg.cancel_scope.cancel()

            _LOGGER.debug("Sending response: %s", session_message)
            return web.json_response(
                data=session_message.message.model_dump(
                    by_alias=True, exclude_none=True
                ),
        async def run_server() -> None:
            await server.run(
                streams.read_stream, streams.write_stream, options, stateless=True
            )

        async with asyncio.timeout(TIMEOUT), anyio.create_task_group() as tg:
            tg.start_soon(run_server)

            await streams.read_stream_writer.send(SessionMessage(message))
            session_message = await anext(streams.write_stream_reader)
            tg.cancel_scope.cancel()

        _LOGGER.debug("Sending response: %s", session_message)
        return web.json_response(
            data=session_message.message.model_dump(by_alias=True, exclude_none=True),
        )

@@ -19,12 +19,7 @@ from homeassistant.helpers.update_coordinator import UpdateFailed

from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator

PLATFORMS = [
    Platform.BINARY_SENSOR,
    Platform.CLIMATE,
    Platform.SENSOR,
    Platform.WATER_HEATER,
]
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]


async def async_setup_entry(hass: HomeAssistant, entry: MelCloudConfigEntry) -> bool:

@@ -1,175 +0,0 @@
"""Support for MelCloud device binary sensors."""

from __future__ import annotations

from collections.abc import Callable
import dataclasses
from typing import Any

from pymelcloud import DEVICE_TYPE_ATW

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity


@dataclasses.dataclass(frozen=True, kw_only=True)
class MelcloudBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Describes Melcloud binary sensor entity."""

    value_fn: Callable[[Any], bool | None]
    enabled: Callable[[Any], bool]


ATW_BINARY_SENSORS: tuple[MelcloudBinarySensorEntityDescription, ...] = (
    MelcloudBinarySensorEntityDescription(
        key="boiler_status",
        translation_key="boiler_status",
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.device.boiler_status,
        enabled=lambda data: data.device.boiler_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="booster_heater1_status",
        translation_key="booster_heater_status",
        translation_placeholders={"number": "1"},
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.device.booster_heater1_status,
        enabled=lambda data: data.device.booster_heater1_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="booster_heater2_status",
        translation_key="booster_heater_status",
        translation_placeholders={"number": "2"},
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: data.device.booster_heater2_status,
        enabled=lambda data: data.device.booster_heater2_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="booster_heater2plus_status",
        translation_key="booster_heater_status",
        translation_placeholders={"number": "2+"},
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: data.device.booster_heater2plus_status,
        enabled=lambda data: data.device.booster_heater2plus_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="immersion_heater_status",
        translation_key="immersion_heater_status",
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.device.immersion_heater_status,
        enabled=lambda data: data.device.immersion_heater_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="water_pump1_status",
        translation_key="water_pump_status",
        translation_placeholders={"number": "1"},
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.device.water_pump1_status,
        enabled=lambda data: data.device.water_pump1_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="water_pump2_status",
        translation_key="water_pump_status",
        translation_placeholders={"number": "2"},
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.device.water_pump2_status,
        enabled=lambda data: data.device.water_pump2_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="water_pump3_status",
        translation_key="water_pump_status",
        translation_placeholders={"number": "3"},
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: data.device.water_pump3_status,
        enabled=lambda data: data.device.water_pump3_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="water_pump4_status",
        translation_key="water_pump_status",
        translation_placeholders={"number": "4"},
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: data.device.water_pump4_status,
        enabled=lambda data: data.device.water_pump4_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="valve_3way_status",
        translation_key="valve_3way_status",
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.device.valve_3way_status,
        enabled=lambda data: data.device.valve_3way_status is not None,
    ),
    MelcloudBinarySensorEntityDescription(
        key="valve_2way_status",
        translation_key="valve_2way_status",
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        value_fn=lambda data: data.device.valve_2way_status,
        enabled=lambda data: data.device.valve_2way_status is not None,
    ),
)


async def async_setup_entry(
    _hass: HomeAssistant,
    entry: MelCloudConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up MELCloud device binary sensors based on config_entry."""
    coordinator = entry.runtime_data

    if DEVICE_TYPE_ATW not in coordinator:
        return

    entities: list[MelDeviceBinarySensor] = [
        MelDeviceBinarySensor(coord, description)
        for description in ATW_BINARY_SENSORS
        for coord in coordinator[DEVICE_TYPE_ATW]
        if description.enabled(coord)
    ]
    async_add_entities(entities)


class MelDeviceBinarySensor(MelCloudEntity, BinarySensorEntity):
    """Representation of a Binary Sensor."""

    entity_description: MelcloudBinarySensorEntityDescription

    def __init__(
        self,
        coordinator: MelCloudDeviceUpdateCoordinator,
        description: MelcloudBinarySensorEntityDescription,
    ) -> None:
        """Initialize the binary sensor."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = (
            f"{coordinator.device.serial}-{coordinator.device.mac}-{description.key}"
        )
        self._attr_device_info = coordinator.device_info

    @property
    def is_on(self) -> bool | None:
        """Return the state of the binary sensor."""
        return self.entity_description.value_fn(self.coordinator)
@@ -1,25 +1,5 @@
{
  "entity": {
    "binary_sensor": {
      "boiler_status": {
        "default": "mdi:water-boiler-off",
        "state": {
          "on": "mdi:water-boiler"
        }
      },
      "valve_2way_status": {
        "default": "mdi:valve-closed",
        "state": {
          "on": "mdi:valve-open"
        }
      },
      "valve_3way_status": {
        "default": "mdi:valve-closed",
        "state": {
          "on": "mdi:valve-open"
        }
      }
    },
    "sensor": {
      "energy_consumed": {
        "default": "mdi:factory"

@@ -42,26 +42,6 @@
      }
    },
    "entity": {
      "binary_sensor": {
        "boiler_status": {
          "name": "Boiler"
        },
        "booster_heater_status": {
          "name": "Booster heater {number}"
        },
        "immersion_heater_status": {
          "name": "Immersion heater"
        },
        "valve_2way_status": {
          "name": "2-way valve"
        },
        "valve_3way_status": {
          "name": "3-way valve"
        },
        "water_pump_status": {
          "name": "Water pump {number}"
        }
      },
      "sensor": {
        "condensing_temperature": {
          "name": "Condensing temperature"

@@ -11,7 +11,6 @@ from homeassistant.components.weather import (
    ATTR_FORECAST_NATIVE_PRECIPITATION,
    ATTR_FORECAST_NATIVE_TEMP,
    ATTR_FORECAST_NATIVE_TEMP_LOW,
    ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
    ATTR_FORECAST_NATIVE_WIND_SPEED,
    ATTR_FORECAST_TIME,
    ATTR_FORECAST_WIND_BEARING,
@@ -185,9 +184,6 @@ class MeteoFranceWeather(
                ATTR_FORECAST_NATIVE_TEMP: forecast["T"]["value"],
                ATTR_FORECAST_NATIVE_PRECIPITATION: forecast["rain"].get("1h"),
                ATTR_FORECAST_NATIVE_WIND_SPEED: forecast["wind"]["speed"],
                ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: forecast["wind"].get(
                    "gust"
                ),
                ATTR_FORECAST_WIND_BEARING: forecast["wind"]["direction"]
                if forecast["wind"]["direction"] != -1
                else None,

@@ -154,8 +154,6 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN):
        except ApiError as err:
            _LOGGER.error("Failed to connect to printer")
            raise CannotConnect from err
        finally:
            await self._sessions.pop().close()

        await self.async_set_unique_id(discovery.upnp_uuid, raise_on_progress=False)
        self._abort_if_unique_id_configured()
@@ -264,12 +262,9 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN):
        assert self._user_input is not None
        octoprint = self._get_octoprint_client(self._user_input)

        try:
            self._user_input[CONF_API_KEY] = await octoprint.request_app_key(
                "Home Assistant", self._user_input[CONF_USERNAME], 300
            )
        finally:
            await self._sessions.pop().close()
        self._user_input[CONF_API_KEY] = await octoprint.request_app_key(
            "Home Assistant", self._user_input[CONF_USERNAME], 300
        )

    def _get_octoprint_client(self, user_input: dict[str, Any]) -> OctoprintClient:
        """Build an octoprint client from the user_input."""
@@ -292,6 +287,11 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN):
            path=user_input[CONF_PATH],
        )

    def async_remove(self) -> None:
        """Detach the session."""
        for session in self._sessions:
            session.detach()


class CannotConnect(HomeAssistantError):
    """Error to indicate we cannot connect."""

@@ -23,6 +23,7 @@ import voluptuous as vol

from homeassistant.components import webhook
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    CONF_SCAN_INTERVAL,
    CONF_TOKEN,
@@ -38,15 +39,21 @@ from .const import (
    CONF_DEVICE_NAME,
    CONF_DEVICE_TYPE,
    CONF_USE_WEBHOOK,
    COORDINATOR,
    DEFAULT_SCAN_INTERVAL,
    DEVICE,
    DEVICE_ID,
    DEVICE_NAME,
    DEVICE_TYPE,
    DOMAIN,
    PLATFORMS,
    SENSOR_DATA,
    UNDO_UPDATE_LISTENER,
)
from .coordinator import PlaatoConfigEntry, PlaatoCoordinator, PlaatoData
from .coordinator import PlaatoCoordinator

_LOGGER = logging.getLogger(__name__)


DEPENDENCIES = ["webhook"]

SENSOR_UPDATE = f"{DOMAIN}_sensor_update"
@@ -75,15 +82,15 @@ WEBHOOK_SCHEMA = vol.Schema(
)


async def async_setup_entry(hass: HomeAssistant, entry: PlaatoConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Configure based on config entry."""
    hass.data.setdefault(DOMAIN, {})

    if entry.data[CONF_USE_WEBHOOK]:
        async_setup_webhook(hass, entry)
    else:
        await async_setup_coordinator(hass, entry)

    entry.async_on_unload(entry.add_update_listener(_async_update_listener))

    await hass.config_entries.async_forward_entry_setups(
        entry, [platform for platform in PLATFORMS if entry.options.get(platform, True)]
    )
@@ -92,26 +99,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: PlaatoConfigEntry) -> bo


@callback
def async_setup_webhook(hass: HomeAssistant, entry: PlaatoConfigEntry) -> None:
def async_setup_webhook(hass: HomeAssistant, entry: ConfigEntry):
    """Init webhook based on config entry."""
    webhook_id = entry.data[CONF_WEBHOOK_ID]
    device_name = entry.data[CONF_DEVICE_NAME]

    entry.runtime_data = PlaatoData(
        coordinator=None,
        device_name=entry.data[CONF_DEVICE_NAME],
        device_type=entry.data[CONF_DEVICE_TYPE],
        device_id=None,
    )
    _set_entry_data(entry, hass)

    webhook.async_register(
        hass, DOMAIN, f"{DOMAIN}.{device_name}", webhook_id, handle_webhook
    )


async def async_setup_coordinator(
    hass: HomeAssistant, entry: PlaatoConfigEntry
) -> None:
async def async_setup_coordinator(hass: HomeAssistant, entry: ConfigEntry):
    """Init auth token based on config entry."""
    auth_token = entry.data[CONF_TOKEN]
    device_type = entry.data[CONF_DEVICE_TYPE]
@@ -126,44 +126,62 @@ async def async_setup_coordinator(
    )
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = PlaatoData(
        coordinator=coordinator,
        device_name=entry.data[CONF_DEVICE_NAME],
        device_type=entry.data[CONF_DEVICE_TYPE],
        device_id=auth_token,
    )
    _set_entry_data(entry, hass, coordinator, auth_token)

    for platform in PLATFORMS:
        if entry.options.get(platform, True):
            coordinator.platforms.append(platform)


async def async_unload_entry(hass: HomeAssistant, entry: PlaatoConfigEntry) -> bool:
def _set_entry_data(entry, hass, coordinator=None, device_id=None):
    device = {
        DEVICE_NAME: entry.data[CONF_DEVICE_NAME],
        DEVICE_TYPE: entry.data[CONF_DEVICE_TYPE],
        DEVICE_ID: device_id,
    }

    hass.data[DOMAIN][entry.entry_id] = {
        COORDINATOR: coordinator,
        DEVICE: device,
        SENSOR_DATA: None,
        UNDO_UPDATE_LISTENER: entry.add_update_listener(_async_update_listener),
    }


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    if entry.data[CONF_USE_WEBHOOK]:
    use_webhook = entry.data[CONF_USE_WEBHOOK]
    hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()

    if use_webhook:
        return await async_unload_webhook(hass, entry)

    return await async_unload_coordinator(hass, entry)


async def async_unload_webhook(hass: HomeAssistant, entry: PlaatoConfigEntry) -> bool:
async def async_unload_webhook(hass: HomeAssistant, entry: ConfigEntry):
    """Unload webhook based entry."""
    if entry.data[CONF_WEBHOOK_ID] is not None:
        webhook.async_unregister(hass, entry.data[CONF_WEBHOOK_ID])
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    return await async_unload_platforms(hass, entry, PLATFORMS)


async def async_unload_coordinator(
    hass: HomeAssistant, entry: PlaatoConfigEntry
) -> bool:
async def async_unload_coordinator(hass: HomeAssistant, entry: ConfigEntry):
    """Unload auth token based entry."""
    coordinator = entry.runtime_data.coordinator
    return await hass.config_entries.async_unload_platforms(
        entry, coordinator.platforms if coordinator else PLATFORMS
    )
    coordinator = hass.data[DOMAIN][entry.entry_id][COORDINATOR]
    return await async_unload_platforms(hass, entry, coordinator.platforms)


async def _async_update_listener(hass: HomeAssistant, entry: PlaatoConfigEntry) -> None:
async def async_unload_platforms(hass: HomeAssistant, entry: ConfigEntry, platforms):
    """Unload platforms."""
    unloaded = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unloaded:
        hass.data[DOMAIN].pop(entry.entry_id)

    return unloaded


async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Handle options update."""
    await hass.config_entries.async_reload(entry.entry_id)


@@ -8,17 +8,17 @@ from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import CONF_USE_WEBHOOK
from .coordinator import PlaatoConfigEntry, PlaatoCoordinator, PlaatoData
from .const import CONF_USE_WEBHOOK, COORDINATOR, DOMAIN
from .entity import PlaatoEntity


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: PlaatoConfigEntry,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Plaato from a config entry."""
@@ -26,12 +26,10 @@ async def async_setup_entry(
    if config_entry.data[CONF_USE_WEBHOOK]:
        return

    entry_data = config_entry.runtime_data
    coordinator = entry_data.coordinator
    assert coordinator is not None
    coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR]
    async_add_entities(
        PlaatoBinarySensor(
            entry_data,
            hass.data[DOMAIN][config_entry.entry_id],
            sensor_type,
            coordinator,
        )
@@ -42,12 +40,7 @@ async def async_setup_entry(
class PlaatoBinarySensor(PlaatoEntity, BinarySensorEntity):
    """Representation of a Binary Sensor."""

    def __init__(
        self,
        data: PlaatoData,
        sensor_type: str,
        coordinator: PlaatoCoordinator | None = None,
    ) -> None:
    def __init__(self, data, sensor_type, coordinator=None) -> None:
        """Initialize plaato binary sensor."""
        super().__init__(data, sensor_type, coordinator)
        if sensor_type is PlaatoKeg.Pins.LEAK_DETECTION:

@@ -19,7 +19,13 @@ PLACEHOLDER_DEVICE_TYPE = "device_type"
PLACEHOLDER_DEVICE_NAME = "device_name"
DOCS_URL = "https://www.home-assistant.io/integrations/plaato/"
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]

SENSOR_DATA = "sensor_data"
COORDINATOR = "coordinator"
DEVICE = "device"
DEVICE_NAME = "device_name"
DEVICE_TYPE = "device_type"
DEVICE_ID = "device_id"
UNDO_UPDATE_LISTENER = "undo_update_listener"
DEFAULT_SCAN_INTERVAL = 5
MIN_UPDATE_INTERVAL = timedelta(minutes=1)


@@ -1,10 +1,8 @@
"""Coordinator for Plaato devices."""

from dataclasses import dataclass, field
from datetime import timedelta
import logging

from pyplaato.models.device import PlaatoDevice
from pyplaato.plaato import Plaato, PlaatoDeviceType

from homeassistant.config_entries import ConfigEntry
@@ -18,29 +16,15 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)


@dataclass
class PlaatoData:
    """Runtime data for the Plaato integration."""

    coordinator: PlaatoCoordinator | None
    device_name: str
    device_type: str
    device_id: str | None
    sensor_data: PlaatoDevice | None = field(default=None)


type PlaatoConfigEntry = ConfigEntry[PlaatoData]


class PlaatoCoordinator(DataUpdateCoordinator[PlaatoDevice]):
class PlaatoCoordinator(DataUpdateCoordinator):
    """Class to manage fetching data from the API."""

    config_entry: PlaatoConfigEntry
    config_entry: ConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: PlaatoConfigEntry,
        config_entry: ConfigEntry,
        auth_token: str,
        device_type: PlaatoDeviceType,
        update_interval: timedelta,
@@ -58,7 +42,7 @@ class PlaatoCoordinator(DataUpdateCoordinator[PlaatoDevice]):
            update_interval=update_interval,
        )

    async def _async_update_data(self) -> PlaatoDevice:
    async def _async_update_data(self):
        """Update data via library."""
        return await self.api.get_data(
            session=aiohttp_client.async_get_clientsession(self.hass),

@@ -1,6 +1,6 @@
"""PlaatoEntity class."""

from typing import Any, cast
from typing import Any

from pyplaato.models.device import PlaatoDevice

@@ -8,8 +8,16 @@ from homeassistant.helpers import entity
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect

from .const import DOMAIN, EXTRA_STATE_ATTRIBUTES, SENSOR_SIGNAL
from .coordinator import PlaatoCoordinator, PlaatoData
from .const import (
    DEVICE,
    DEVICE_ID,
    DEVICE_NAME,
    DEVICE_TYPE,
    DOMAIN,
    EXTRA_STATE_ATTRIBUTES,
    SENSOR_DATA,
    SENSOR_SIGNAL,
)


class PlaatoEntity(entity.Entity):
@@ -17,20 +25,14 @@ class PlaatoEntity(entity.Entity):

    _attr_should_poll = False

    def __init__(
        self,
        data: PlaatoData,
        sensor_type: str,
        coordinator: PlaatoCoordinator | None = None,
    ) -> None:
    def __init__(self, data, sensor_type, coordinator=None):
        """Initialize the sensor."""
        self._coordinator = coordinator
        self._entry_data = data
        self._sensor_type = sensor_type
        assert self._entry_data.device_id is not None
        self._device_id = cast(str, data.device_id)
        self._device_type = data.device_type
        self._device_name = data.device_name
        self._device_id = data[DEVICE][DEVICE_ID]
        self._device_type = data[DEVICE][DEVICE_TYPE]
        self._device_name = data[DEVICE][DEVICE_NAME]
        self._attr_unique_id = f"{self._device_id}_{self._sensor_type}"
        self._attr_name = f"{DOMAIN} {self._device_type} {self._device_name} {self._sensor_name}".title()
        sw_version = None
@@ -56,7 +58,7 @@ class PlaatoEntity(entity.Entity):
    def _sensor_data(self) -> PlaatoDevice:
        if self._coordinator:
            return self._coordinator.data
        return self._entry_data.sensor_data
        return self._entry_data[SENSOR_DATA]

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:

@@ -6,6 +6,7 @@ from pyplaato.models.device import PlaatoDevice
from pyplaato.plaato import PlaatoKeg

from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import (
    async_dispatcher_connect,
@@ -18,8 +19,15 @@ from homeassistant.helpers.entity_platform import (
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import ATTR_TEMP, SENSOR_UPDATE
from .const import CONF_USE_WEBHOOK, SENSOR_SIGNAL
from .coordinator import PlaatoConfigEntry, PlaatoCoordinator, PlaatoData
from .const import (
    CONF_USE_WEBHOOK,
    COORDINATOR,
    DEVICE,
    DEVICE_ID,
    DOMAIN,
    SENSOR_DATA,
    SENSOR_SIGNAL,
)
from .entity import PlaatoEntity


@@ -34,19 +42,19 @@ async def async_setup_platform(

async def async_setup_entry(
    hass: HomeAssistant,
    entry: PlaatoConfigEntry,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Plaato from a config entry."""
    entry_data = entry.runtime_data
    entry_data = hass.data[DOMAIN][entry.entry_id]

    @callback
    def _async_update_from_webhook(device_id, sensor_data: PlaatoDevice):
        """Update/Create the sensors."""
        entry_data.sensor_data = sensor_data
        entry_data[SENSOR_DATA] = sensor_data

        if device_id != entry_data.device_id:
            entry_data.device_id = device_id
        if device_id != entry_data[DEVICE][DEVICE_ID]:
            entry_data[DEVICE][DEVICE_ID] = device_id
            async_add_entities(
                [
                    PlaatoSensor(entry_data, sensor_type)
@@ -60,8 +68,7 @@ async def async_setup_entry(
    if entry.data[CONF_USE_WEBHOOK]:
        async_dispatcher_connect(hass, SENSOR_UPDATE, _async_update_from_webhook)
    else:
        coordinator = entry_data.coordinator
        assert coordinator is not None
        coordinator = entry_data[COORDINATOR]
        async_add_entities(
            PlaatoSensor(entry_data, sensor_type, coordinator)
            for sensor_type in coordinator.data.sensors
@@ -71,23 +78,18 @@ async def async_setup_entry(
class PlaatoSensor(PlaatoEntity, SensorEntity):
    """Representation of a Plaato Sensor."""

    def __init__(
        self,
        data: PlaatoData,
        sensor_type: str,
        coordinator: PlaatoCoordinator | None = None,
    ) -> None:
    def __init__(self, data, sensor_type, coordinator=None) -> None:
        """Initialize plaato sensor."""
        super().__init__(data, sensor_type, coordinator)
        if sensor_type is PlaatoKeg.Pins.TEMPERATURE or sensor_type == ATTR_TEMP:
            self._attr_device_class = SensorDeviceClass.TEMPERATURE

    @property
    def native_value(self) -> str | int | float | None:
    def native_value(self):
        """Return the state of the sensor."""
        return self._sensor_data.sensors.get(self._sensor_type)

    @property
    def native_unit_of_measurement(self) -> str | None:
    def native_unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._sensor_data.get_unit_of_measurement(self._sensor_type)

@@ -4,5 +4,5 @@
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/proxy",
  "quality_scale": "legacy",
  "requirements": ["Pillow==12.2.0"]
  "requirements": ["Pillow==12.1.1"]
}

@@ -2,14 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pvo import (
|
||||
PVOutput,
|
||||
PVOutputAuthenticationError,
|
||||
PVOutputConnectionError,
|
||||
PVOutputError,
|
||||
PVOutputNoDataError,
|
||||
Status,
|
||||
)
|
||||
from pvo import PVOutput, PVOutputAuthenticationError, PVOutputNoDataError, Status
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
@@ -44,20 +37,7 @@ class PVOutputDataUpdateCoordinator(DataUpdateCoordinator[Status]):
|
||||
"""Fetch system status from PVOutput."""
|
||||
try:
|
||||
return await self.pvoutput.status()
|
||||
except PVOutputNoDataError as err:
|
||||
raise UpdateFailed("PVOutput has no data available") from err
|
||||
except PVOutputAuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
except PVOutputNoDataError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_data_available",
|
||||
) from err
|
||||
except PVOutputConnectionError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="communication_error",
|
||||
) from err
|
||||
except PVOutputError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="unknown_error",
|
||||
) from err
|
||||
|
||||
@@ -42,16 +42,5 @@
|
||||
"name": "Power generation"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"communication_error": {
|
||||
"message": "An error occurred while communicating with the PVOutput service."
|
||||
},
|
||||
"no_data_available": {
|
||||
"message": "The PVOutput service has no data available for this system."
|
||||
},
|
||||
"unknown_error": {
|
||||
"message": "An unknown error occurred while communicating with the PVOutput service."
|
||||
}
|
||||
}
|
||||
}
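For readers unfamiliar with the pattern above: raising UpdateFailed with translation_domain and translation_key makes Home Assistant resolve the user-facing message from the "exceptions" block just added to strings.json, instead of hardcoding English in the coordinator. A minimal sketch, assuming DOMAIN is the integration's "pvoutput" constant:

from homeassistant.helpers.update_coordinator import UpdateFailed

DOMAIN = "pvoutput"  # assumption: the integration's domain constant

# No message text is passed here; Home Assistant looks it up at
# components/pvoutput/strings.json -> exceptions.communication_error.message
err = UpdateFailed(
    translation_domain=DOMAIN,
    translation_key="communication_error",
)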
@@ -6,5 +6,5 @@
  "iot_class": "calculated",
  "loggers": ["pyzbar"],
  "quality_scale": "legacy",
  "requirements": ["Pillow==12.2.0", "pyzbar==0.1.7"]
  "requirements": ["Pillow==12.1.1", "pyzbar==0.1.7"]
}

@@ -1,17 +0,0 @@
"""Provides conditions for remotes."""

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition

from . import DOMAIN

CONDITIONS: dict[str, type[Condition]] = {
    "is_off": make_entity_state_condition(DOMAIN, STATE_OFF),
    "is_on": make_entity_state_condition(DOMAIN, STATE_ON),
}


async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
    """Return the remote conditions."""
    return CONDITIONS
@@ -1,17 +0,0 @@
|
||||
.condition_common: &condition_common
|
||||
target:
|
||||
entity:
|
||||
domain: remote
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
|
||||
is_off: *condition_common
|
||||
is_on: *condition_common
|
||||
@@ -1,12 +1,4 @@
{
  "conditions": {
    "is_off": {
      "condition": "mdi:remote-off"
    },
    "is_on": {
      "condition": "mdi:remote"
    }
  },
  "entity_component": {
    "_": {
      "default": "mdi:remote",
@@ -1,28 +1,7 @@
{
  "common": {
    "condition_behavior_name": "Condition passes if",
    "trigger_behavior_name": "Trigger when"
  },
  "conditions": {
    "is_off": {
      "description": "Tests if one or more remotes are off.",
      "fields": {
        "behavior": {
          "name": "[%key:component::remote::common::condition_behavior_name%]"
        }
      },
      "name": "Remote is off"
    },
    "is_on": {
      "description": "Tests if one or more remotes are on.",
      "fields": {
        "behavior": {
          "name": "[%key:component::remote::common::condition_behavior_name%]"
        }
      },
      "name": "Remote is on"
    }
  },
  "device_automation": {
    "action_type": {
      "toggle": "[%key:common::device_automation::action_type::toggle%]",
@@ -52,12 +31,6 @@
      }
    },
  "selector": {
    "condition_behavior": {
      "options": {
        "all": "All",
        "any": "Any"
      }
    },
    "trigger_behavior": {
      "options": {
        "any": "Any",
@@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/seven_segments",
  "iot_class": "local_polling",
  "quality_scale": "legacy",
  "requirements": ["Pillow==12.2.0"]
  "requirements": ["Pillow==12.1.1"]
}
@@ -128,16 +128,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ShellyConfigEntry) -> bo
    """Set up Shelly from a config entry."""
    entry.runtime_data = ShellyEntryData([])

    # The custom component for Shelly devices uses shelly domain as well as core
    # integration. If the user removes the custom component but doesn't remove the
    # config entry, core integration will try to configure that config entry with an
    # error. The config entry data for this custom component doesn't contain host
    # value, so if host isn't present, config entry will not be configured.
    # The community integration for Shelly devices uses Shelly domain as well as Core
    # integration. If the user removes the community integration but doesn't remove
    # the config entry, Core integration will try to configure that config entry with
    # an error. The config entry data for this community integration doesn't contain
    # host value, so if host isn't present, config entry will not be configured.
    if not entry.data.get(CONF_HOST):
        LOGGER.warning(
            (
                "The config entry %s probably comes from a custom integration, please"
                " remove it if you want to use core Shelly integration"
                "The config entry %s probably comes from a community integration, "
                "please remove it if you want to use the Core Shelly integration"
            ),
            entry.title,
        )
@@ -6,5 +6,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["simplehound"],
  "quality_scale": "legacy",
  "requirements": ["Pillow==12.2.0", "simplehound==0.3"]
  "requirements": ["Pillow==12.1.1", "simplehound==0.3"]
}
@@ -1,98 +0,0 @@
|
||||
"""Diagnostics support for Sunricher DALI."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.components.diagnostics import REDACTED, async_redact_data
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_USERNAME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import CONF_SERIAL_NUMBER
|
||||
from .types import DaliCenterConfigEntry
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from PySrDaliGateway import Device, Scene
|
||||
from PySrDaliGateway.types import SceneDeviceType
|
||||
|
||||
TO_REDACT = {
|
||||
CONF_HOST,
|
||||
CONF_USERNAME,
|
||||
CONF_PASSWORD,
|
||||
CONF_SERIAL_NUMBER,
|
||||
"dev_sn",
|
||||
}
|
||||
|
||||
ALLOWED_ENTRY_KEYS: tuple[str, ...] = (
|
||||
CONF_HOST,
|
||||
CONF_PORT,
|
||||
CONF_NAME,
|
||||
CONF_USERNAME,
|
||||
CONF_PASSWORD,
|
||||
CONF_SERIAL_NUMBER,
|
||||
)
|
||||
|
||||
|
||||
def _serialize_entry_data(entry: DaliCenterConfigEntry) -> dict[str, Any]:
|
||||
"""Return entry data filtered by the whitelist."""
|
||||
return {key: entry.data[key] for key in ALLOWED_ENTRY_KEYS if key in entry.data}
|
||||
|
||||
|
||||
def _serialize_device(device: Device) -> dict[str, Any]:
|
||||
"""Return a whitelisted dict view of a Device."""
|
||||
return {
|
||||
"dev_id": device.dev_id,
|
||||
"unique_id": device.unique_id,
|
||||
"name": device.name,
|
||||
"dev_type": device.dev_type,
|
||||
"channel": device.channel,
|
||||
"address": device.address,
|
||||
"status": device.status,
|
||||
"dev_sn": device.dev_sn,
|
||||
"area_name": getattr(device, "area_name", None),
|
||||
"area_id": getattr(device, "area_id", None),
|
||||
"model": device.model,
|
||||
}
|
||||
|
||||
|
||||
def _serialize_scene(scene: Scene) -> dict[str, Any]:
|
||||
"""Return a whitelisted dict view of a Scene."""
|
||||
members: list[SceneDeviceType] = scene.devices
|
||||
return {
|
||||
"scene_id": scene.scene_id,
|
||||
"name": scene.name,
|
||||
"channel": scene.channel,
|
||||
"area_id": getattr(scene, "area_id", None),
|
||||
"unique_id": scene.unique_id,
|
||||
"device_unique_ids": [member["unique_id"] for member in members],
|
||||
}
|
||||
|
||||
|
||||
def _strip_gw_sn(data: Any, gw_sn: str) -> Any:
|
||||
"""Recursively replace gw_sn in string values and list items."""
|
||||
if isinstance(data, dict):
|
||||
return {key: _strip_gw_sn(value, gw_sn) for key, value in data.items()}
|
||||
if isinstance(data, list):
|
||||
return [_strip_gw_sn(item, gw_sn) for item in data]
|
||||
if isinstance(data, str):
|
||||
return data.replace(gw_sn, REDACTED)
|
||||
return data
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: DaliCenterConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
data = entry.runtime_data
|
||||
payload = {
|
||||
"entry_data": _serialize_entry_data(entry),
|
||||
"devices": [_serialize_device(device) for device in data.devices],
|
||||
"scenes": [_serialize_scene(scene) for scene in data.scenes],
|
||||
}
|
||||
return _strip_gw_sn(async_redact_data(payload, TO_REDACT), data.gateway.gw_sn)
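A small illustration (not part of the diff, and assuming _strip_gw_sn above is in scope) of why the recursive scrub complements async_redact_data: the gateway serial can be embedded inside other identifiers, which key-based redaction cannot reach. REDACTED is the diagnostics placeholder string "**REDACTED**".

payload = {
    "gateway": "GW123456",
    "devices": [{"unique_id": "GW123456-01-02"}],  # serial embedded in an ID
}
# Key-based redaction would miss "unique_id"; substring replacement does not:
assert _strip_gw_sn(payload, "GW123456") == {
    "gateway": "**REDACTED**",
    "devices": [{"unique_id": "**REDACTED**-01-02"}],
}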
@@ -46,7 +46,7 @@ rules:
  test-coverage: done
  # Gold
  devices: done
  diagnostics: done
  diagnostics: todo
  discovery-update-info: done
  discovery:
    status: exempt

@@ -52,14 +52,12 @@ class SwitchbotEventEntity(SwitchbotEntity, EventEntity):
        self._event = event
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.base_unique_id}-{event}"
        self._previous_doorbell_seq = int(
            coordinator.device.parsed_data.get("doorbell_seq", 0)
        )
        self._previous_value = False

    @callback
    def _async_update_attrs(self) -> None:
        """Update the entity attributes."""
        seq = int(self.parsed_data.get("doorbell_seq", 0))
        if seq not in (0, self._previous_doorbell_seq):
            value = bool(self.parsed_data.get(self._event, False))
            if value and not self._previous_value:
                self._trigger_event("ring")
            self._previous_doorbell_seq = seq
            self._previous_value = value
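The handler above combines two guards: a sequence number to ignore re-broadcast BLE advertisements, and an edge trigger so only a False-to-True transition fires the event. A self-contained sketch of the same logic with invented values:

def should_ring(
    seq: int, value: bool, prev_seq: int, prev_value: bool
) -> tuple[bool, int, bool]:
    """Return (fire_event, new_prev_seq, new_prev_value)."""
    if seq in (0, prev_seq):  # absent or unchanged sequence: stale advertisement
        return (False, prev_seq, prev_value)
    return (value and not prev_value, seq, value)  # edge trigger on rising value


assert should_ring(5, True, 4, False) == (True, 5, True)   # new press -> ring
assert should_ring(5, True, 5, True) == (False, 5, True)   # same advert -> no-op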
@@ -62,7 +62,6 @@ from .const import (
    ATTR_DIRECTORY_PATH,
    ATTR_DISABLE_NOTIF,
    ATTR_DISABLE_WEB_PREV,
    ATTR_DRAFT_ID,
    ATTR_FILE,
    ATTR_FILE_ID,
    ATTR_FILE_NAME,
@@ -130,7 +129,6 @@ from .const import (
    SERVICE_SEND_LOCATION,
    SERVICE_SEND_MEDIA_GROUP,
    SERVICE_SEND_MESSAGE,
    SERVICE_SEND_MESSAGE_DRAFT,
    SERVICE_SEND_PHOTO,
    SERVICE_SEND_POLL,
    SERVICE_SEND_STICKER,
@@ -178,19 +176,6 @@ SERVICE_SCHEMA_SEND_MESSAGE = vol.All(
    ),
)

SERVICE_SCHEMA_SEND_MESSAGE_DRAFT = vol.Schema(
    {
        vol.Optional(ATTR_ENTITY_ID): vol.All(cv.ensure_list, [cv.string]),
        vol.Optional(ATTR_TARGET): vol.All(cv.ensure_list, [vol.Coerce(int)]),
        vol.Optional(CONF_CONFIG_ENTRY_ID): cv.string,
        vol.Optional(ATTR_CHAT_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]),
        vol.Optional(ATTR_MESSAGE_THREAD_ID): vol.Coerce(int),
        vol.Required(ATTR_DRAFT_ID): vol.All(vol.Coerce(int), vol.Range(min=1)),
        vol.Required(ATTR_MESSAGE): cv.string,
        vol.Optional(ATTR_PARSER): ATTR_PARSER_SCHEMA,
    }
)

SERVICE_SCHEMA_SEND_CHAT_ACTION = vol.All(
    cv.deprecated(ATTR_TIMEOUT),
    vol.Schema(
@@ -439,7 +424,6 @@ SERVICE_SCHEMA_DOWNLOAD_FILE = vol.Schema(

SERVICE_MAP: dict[str, VolSchemaType] = {
    SERVICE_SEND_MESSAGE: SERVICE_SCHEMA_SEND_MESSAGE,
    SERVICE_SEND_MESSAGE_DRAFT: SERVICE_SCHEMA_SEND_MESSAGE_DRAFT,
    SERVICE_SEND_CHAT_ACTION: SERVICE_SCHEMA_SEND_CHAT_ACTION,
    SERVICE_SEND_PHOTO: SERVICE_SCHEMA_SEND_FILE,
    SERVICE_SEND_MEDIA_GROUP: SERVICE_SCHEMA_SEND_MEDIA_GROUP,
@@ -631,8 +615,6 @@ async def _call_service(
        await notify_service.set_message_reaction(context=service.context, **kwargs)
    elif service_name == SERVICE_EDIT_MESSAGE_MEDIA:
        await notify_service.edit_message_media(context=service.context, **kwargs)
    elif service_name == SERVICE_SEND_MESSAGE_DRAFT:
        await notify_service.send_message_draft(context=service.context, **kwargs)
    elif service_name == SERVICE_DOWNLOAD_FILE:
        return await notify_service.download_file(context=service.context, **kwargs)
    else:

@@ -1013,36 +1013,6 @@ class TelegramNotificationService:
            context=context,
        )

    async def send_message_draft(
        self,
        message: str,
        chat_id: int,
        draft_id: int,
        context: Context | None = None,
        **kwargs: dict[str, Any],
    ) -> None:
        """Stream a partial message to a user while the message is being generated."""
        params = self._get_msg_kwargs(kwargs)

        _LOGGER.debug(
            "Sending message draft %s in chat ID %s with params: %s",
            draft_id,
            chat_id,
            params,
        )

        await self._send_msg(
            self.bot.send_message_draft,
            None,
            chat_id=chat_id,
            draft_id=draft_id,
            text=message,
            message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
            parse_mode=params[ATTR_PARSER],
            read_timeout=params[ATTR_TIMEOUT],
            context=context,
        )
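A hypothetical caller for the new service, with made-up IDs, to show the intended flow: repeated calls carrying the same draft_id update a single chat bubble rather than posting new messages.

from homeassistant.core import HomeAssistant


async def stream_reply(hass: HomeAssistant, entry_id: str) -> None:
    """Hypothetical helper streaming a growing draft to one chat bubble."""
    for partial in ("The garage door", "The garage door has been open"):
        await hass.services.async_call(
            "telegram_bot",
            "send_message_draft",
            {
                "config_entry_id": entry_id,
                "chat_id": 123456789,  # made-up pre-authorized chat ID
                "draft_id": 1,  # same ID, so Telegram animates the update
                "message": partial,
            },
            blocking=True,
        )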

    async def download_file(
        self,
        file_id: str,

@@ -31,7 +31,6 @@ DEFAULT_TRUSTED_NETWORKS = [ip_network("149.154.160.0/20"), ip_network("91.108.4

SERVICE_SEND_CHAT_ACTION = "send_chat_action"
SERVICE_SEND_MESSAGE = "send_message"
SERVICE_SEND_MESSAGE_DRAFT = "send_message_draft"
SERVICE_SEND_PHOTO = "send_photo"
SERVICE_SEND_MEDIA_GROUP = "send_media_group"
SERVICE_SEND_STICKER = "send_sticker"
@@ -91,7 +90,6 @@ ATTR_DATE = "date"
ATTR_DISABLE_NOTIF = "disable_notification"
ATTR_DISABLE_WEB_PREV = "disable_web_page_preview"
ATTR_DIRECTORY_PATH = "directory_path"
ATTR_DRAFT_ID = "draft_id"
ATTR_EDITED_MSG = "edited_message"
ATTR_FILE = "file"
ATTR_FILE_ID = "file_id"
@@ -49,9 +49,6 @@
    "send_message": {
      "service": "mdi:send"
    },
    "send_message_draft": {
      "service": "mdi:chat-processing"
    },
    "send_photo": {
      "service": "mdi:camera"
    },
@@ -1198,50 +1198,3 @@ download_file:
      example: "my_downloaded_file"
      selector:
        text:

send_message_draft:
  fields:
    entity_id:
      selector:
        entity:
          filter:
            domain: notify
            integration: telegram_bot
          multiple: true
          reorder: true
    message_thread_id:
      selector:
        number:
          mode: box
    draft_id:
      required: true
      selector:
        number:
          mode: box
          min: 1
    message:
      example: The garage door has been o
      required: true
      selector:
        text:
    parse_mode:
      selector:
        select:
          options:
            - "html"
            - "markdown"
            - "markdownv2"
            - "plain_text"
          translation_key: "parse_mode"
    advanced:
      collapsed: true
      fields:
        config_entry_id:
          selector:
            config_entry:
              integration: telegram_bot
        chat_id:
          example: "[12345, 67890] or 12345"
          selector:
            text:
              multiple: true
@@ -951,45 +951,6 @@
        }
      }
    },
    "send_message_draft": {
      "description": "Stream a partial message to a user while the message is being generated.",
      "fields": {
        "chat_id": {
          "description": "One or more pre-authorized chat IDs to send the message draft to.",
          "name": "[%key:component::telegram_bot::services::edit_message::fields::chat_id::name%]"
        },
        "config_entry_id": {
          "description": "The config entry representing the Telegram bot to send the message draft.",
          "name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]"
        },
        "draft_id": {
          "description": "Unique identifier of the message draft. Changes of drafts with the same identifier are animated.",
          "name": "Draft ID"
        },
        "entity_id": {
          "description": "[%key:component::telegram_bot::services::send_message::fields::entity_id::description%]",
          "name": "[%key:component::telegram_bot::services::send_message::fields::entity_id::name%]"
        },
        "message": {
          "description": "Available part of the message for temporary notification.\nCan't parse entities? Format your message according to the [formatting options]({formatting_options_url}).",
          "name": "[%key:component::telegram_bot::services::send_message::fields::message::name%]"
        },
        "message_thread_id": {
          "description": "[%key:component::telegram_bot::services::send_message::fields::message_thread_id::description%]",
          "name": "[%key:component::telegram_bot::services::send_message::fields::message_thread_id::name%]"
        },
        "parse_mode": {
          "description": "[%key:component::telegram_bot::services::send_message::fields::parse_mode::description%]",
          "name": "[%key:component::telegram_bot::services::send_message::fields::parse_mode::name%]"
        }
      },
      "name": "Send message draft",
      "sections": {
        "advanced": {
          "name": "[%key:component::telegram_bot::services::send_message::sections::advanced::name%]"
        }
      }
    },
    "send_photo": {
      "description": "Sends a photo.",
      "fields": {
@@ -180,16 +180,18 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema:
        }

    if domain == Platform.BINARY_SENSOR:
        schema |= _SCHEMA_STATE | {
            vol.Optional(CONF_DEVICE_CLASS): selector.SelectSelector(
                selector.SelectSelectorConfig(
                    options=[cls.value for cls in BinarySensorDeviceClass],
                    mode=selector.SelectSelectorMode.DROPDOWN,
                    translation_key="binary_sensor_device_class",
                    sort=True,
        schema |= _SCHEMA_STATE
        if flow_type == "config":
            schema |= {
                vol.Optional(CONF_DEVICE_CLASS): selector.SelectSelector(
                    selector.SelectSelectorConfig(
                        options=[cls.value for cls in BinarySensorDeviceClass],
                        mode=selector.SelectSelectorMode.DROPDOWN,
                        translation_key="binary_sensor_device_class",
                        sort=True,
                    ),
                ),
                ),
        }
            }

    if domain == Platform.BUTTON:
        schema |= {
@@ -608,7 +608,6 @@
    },
    "binary_sensor": {
      "data": {
        "device_class": "[%key:component::template::common::device_class%]",
        "device_id": "[%key:common::config_flow::data::device%]",
        "state": "[%key:component::template::common::state%]"
      },
@@ -8,5 +8,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["tibber"],
  "requirements": ["pyTibber==0.37.1"]
  "requirements": ["pyTibber==0.37.0"]
}
@@ -1,20 +0,0 @@
|
||||
"""Provides conditions for to-do lists."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
make_entity_numerical_condition,
|
||||
make_entity_state_condition,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"all_completed": make_entity_state_condition(DOMAIN, "0"),
|
||||
"incomplete": make_entity_numerical_condition(DOMAIN),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
"""Return the to-do list conditions."""
|
||||
return CONDITIONS
|
||||
@@ -1,37 +0,0 @@
|
||||
.condition_common: &condition_common
|
||||
target: &condition_todo_target
|
||||
entity:
|
||||
domain: todo
|
||||
fields:
|
||||
behavior: &condition_behavior
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
|
||||
.incomplete_threshold_entity: &incomplete_threshold_entity
|
||||
- domain: input_number
|
||||
- domain: number
|
||||
- domain: sensor
|
||||
|
||||
.incomplete_threshold_number: &incomplete_threshold_number
|
||||
min: 0
|
||||
mode: box
|
||||
|
||||
all_completed: *condition_common
|
||||
|
||||
incomplete:
|
||||
target: *condition_todo_target
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
numeric_threshold:
|
||||
entity: *incomplete_threshold_entity
|
||||
mode: is
|
||||
number: *incomplete_threshold_number
|
||||
@@ -1,12 +1,4 @@
{
  "conditions": {
    "all_completed": {
      "condition": "mdi:clipboard-check"
    },
    "incomplete": {
      "condition": "mdi:clipboard-alert"
    }
  },
  "entity_component": {
    "_": {
      "default": "mdi:clipboard-list"
@@ -1,31 +1,4 @@
{
  "common": {
    "condition_behavior_name": "Condition passes if",
    "condition_threshold_name": "Threshold type"
  },
  "conditions": {
    "all_completed": {
      "description": "Tests if all to-do items are completed in one or more to-do lists.",
      "fields": {
        "behavior": {
          "name": "[%key:component::todo::common::condition_behavior_name%]"
        }
      },
      "name": "All to-do items completed"
    },
    "incomplete": {
      "description": "Tests the number of incomplete to-do items in one or more to-do lists.",
      "fields": {
        "behavior": {
          "name": "[%key:component::todo::common::condition_behavior_name%]"
        },
        "threshold": {
          "name": "[%key:component::todo::common::condition_threshold_name%]"
        }
      },
      "name": "Incomplete to-do items"
    }
  },
  "entity_component": {
    "_": {
      "name": "[%key:component::todo::title%]"
@@ -40,12 +13,6 @@
      }
    },
  "selector": {
    "condition_behavior": {
      "options": {
        "all": "All",
        "any": "Any"
      }
    },
    "status": {
      "options": {
        "completed": "Completed",
@@ -9,10 +9,9 @@ from typing import Any
from unifi_access_api import ApiAuthError, ApiConnectionError, UnifiAccessApiClient
import voluptuous as vol

from homeassistant.config_entries import SOURCE_IGNORE, ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import DiscoveryInfoType
from homeassistant.util.ssl import create_no_verify_ssl_context

from .const import DOMAIN
@@ -26,11 +25,6 @@ class UnifiAccessConfigFlow(ConfigFlow, domain=DOMAIN):
    VERSION = 1
    MINOR_VERSION = 1

    def __init__(self) -> None:
        """Init the config flow."""
        super().__init__()
        self._discovered_device: dict[str, Any] = {}

    async def _validate_input(self, user_input: dict[str, Any]) -> dict[str, str]:
        """Validate user input and return errors dict."""
        errors: dict[str, str] = {}
@@ -123,66 +117,6 @@ class UnifiAccessConfigFlow(ConfigFlow, domain=DOMAIN):
            errors=errors,
        )

    async def async_step_integration_discovery(
        self, discovery_info: DiscoveryInfoType
    ) -> ConfigFlowResult:
        """Handle discovery via unifi_discovery."""
        self._discovered_device = discovery_info
        source_ip = discovery_info["source_ip"]
        mac = discovery_info["hw_addr"].replace(":", "").upper()
        await self.async_set_unique_id(mac)
        for entry in self._async_current_entries():
            if entry.source == SOURCE_IGNORE:
                continue
            if entry.data.get(CONF_HOST) == source_ip:
                if not entry.unique_id:
                    self.hass.config_entries.async_update_entry(entry, unique_id=mac)
                return self.async_abort(reason="already_configured")
        self._abort_if_unique_id_configured(updates={CONF_HOST: source_ip})
        return await self.async_step_discovery_confirm()

    async def async_step_discovery_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm discovery and collect API token."""
        errors: dict[str, str] = {}
        discovery_info = self._discovered_device
        source_ip = discovery_info["source_ip"]

        if user_input is not None:
            merged_input = {
                CONF_HOST: source_ip,
                CONF_API_TOKEN: user_input[CONF_API_TOKEN],
                CONF_VERIFY_SSL: user_input.get(CONF_VERIFY_SSL, False),
            }
            errors = await self._validate_input(merged_input)
            if not errors:
                return self.async_create_entry(
                    title="UniFi Access",
                    data=merged_input,
                )

        name = discovery_info.get("hostname") or discovery_info.get("platform")
        if not name:
            short_mac = discovery_info["hw_addr"].replace(":", "").upper()[-6:]
            name = f"Access {short_mac}"
        placeholders = {
            "name": name,
            "ip_address": source_ip,
        }
        self.context["title_placeholders"] = placeholders
        return self.async_show_form(
            step_id="discovery_confirm",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_API_TOKEN): str,
                    vol.Required(CONF_VERIFY_SSL, default=False): bool,
                }
            ),
            description_placeholders=placeholders,
            errors=errors,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
@@ -3,7 +3,6 @@
  "name": "UniFi Access",
  "codeowners": ["@imhotep", "@RaHehl"],
  "config_flow": true,
  "dependencies": ["unifi_discovery"],
  "documentation": "https://www.home-assistant.io/integrations/unifi_access",
  "integration_type": "hub",
  "iot_class": "local_push",

@@ -42,10 +42,8 @@ rules:
  # Gold
  devices: done
  diagnostics: done
  discovery-update-info: done
  discovery:
    status: exempt
    comment: Discovery is handled via unifi_discovery dependency using SOURCE_INTEGRATION_DISCOVERY.
  discovery-update-info: todo
  discovery: todo
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
@@ -12,17 +12,6 @@
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "discovery_confirm": {
        "data": {
          "api_token": "[%key:common::config_flow::data::api_token%]",
          "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
        },
        "data_description": {
          "api_token": "[%key:component::unifi_access::config::step::user::data_description::api_token%]",
          "verify_ssl": "[%key:component::unifi_access::config::step::user::data_description::verify_ssl%]"
        },
        "description": "A UniFi Access controller was discovered at {ip_address} ({name})."
      },
      "reauth_confirm": {
        "data": {
          "api_token": "[%key:common::config_flow::data::api_token%]"
@@ -9,5 +9,4 @@ DOMAIN = "unifi_discovery"
# when initial discovery runs — the same pattern DHCP/SSDP use with manifest matchers.
CONSUMER_MAPPING: dict[UnifiService, str] = {
    UnifiService.Protect: "unifiprotect",
    UnifiService.Access: "unifi_access",
}
@@ -2,8 +2,7 @@

from __future__ import annotations

from collections.abc import Mapping
from dataclasses import fields
from dataclasses import asdict
from datetime import timedelta
import logging
from typing import Any
@@ -25,24 +24,6 @@ DISCOVERY_INTERVAL = timedelta(minutes=60)
DATA_DISCOVERY_STARTED: HassKey[bool] = HassKey(DOMAIN)


def _device_to_dict(device: UnifiDevice) -> dict[str, Any]:
    """Convert a UnifiDevice to a plain dict.

    Avoid dataclasses.asdict() because it calls copy.deepcopy() on non-builtin
    types. On Python 3.14+ deepcopy cannot pickle mappingproxy objects, and
    Enum members (used as dict keys in ``services``) internally reference
    ``__members__`` which is a mappingproxy. This causes asdict() to crash
    with ``TypeError: cannot pickle 'mappingproxy' object``.
    """
    data: dict[str, Any] = {}
    for f in fields(device):
        value = getattr(device, f.name)
        if isinstance(value, Mapping):
            value = dict(value)
        data[f.name] = value
    return data
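A minimal sketch with toy types (not the real UnifiDevice) of what the field walk above does: shallow-copy Mapping fields instead of handing the whole object graph to dataclasses.asdict(), whose deepcopy of Enum-keyed mappings is what the docstring reports crashing on Python 3.14+.

from collections.abc import Mapping
from dataclasses import dataclass, fields
from enum import Enum
from typing import Any


class Svc(Enum):
    ACCESS = "access"


@dataclass
class Toy:
    name: str
    services: Mapping[Svc, bool]


def to_dict(obj: Toy) -> dict[str, Any]:
    out: dict[str, Any] = {}
    for f in fields(obj):
        value = getattr(obj, f.name)
        if isinstance(value, Mapping):
            value = dict(value)  # shallow copy; no deepcopy of Enum keys
        out[f.name] = value
    return out


assert to_dict(Toy("gw", {Svc.ACCESS: True})) == {
    "name": "gw",
    "services": {Svc.ACCESS: True},
}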


@callback
def async_start_discovery(hass: HomeAssistant) -> None:
    """Start discovery of UniFi devices."""
@@ -93,5 +74,5 @@ def async_trigger_discovery(
        hass,
        domain,
        context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
        data=_device_to_dict(device),
        data=asdict(device),
    )
@@ -1,7 +1,7 @@
{
  "domain": "vicare",
  "name": "Viessmann ViCare",
  "codeowners": ["@CFenner", "@lackas"],
  "codeowners": ["@CFenner"],
  "config_flow": true,
  "dhcp": [
    {
@@ -14,7 +14,6 @@ from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.start import async_at_started
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN, INTEGRATION_TITLE
@@ -116,13 +115,10 @@ async def _async_setup_coordinator(
    energy_coordinator = WaterFurnaceEnergyCoordinator(
        hass, device_client, entry, device_client.gwid
    )

    # Defer the first energy refresh until HA has fully started so the
    # potentially large initial backfill doesn't compete with startup I/O.
    async def _async_start_energy(hass: HomeAssistant) -> None:
        await energy_coordinator.async_refresh()

    entry.async_on_unload(async_at_started(hass, _async_start_energy))
    # Use async_refresh() instead of async_config_entry_first_refresh() so that
    # energy data failures (e.g. WFNoDataError for new accounts) don't block
    # the integration from loading. Realtime sensor data is the primary concern.
    await energy_coordinator.async_refresh()

    return device_client.gwid, WaterFurnaceDeviceData(
        realtime=coordinator, energy=energy_coordinator
@@ -2,12 +2,9 @@

from __future__ import annotations

import asyncio
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
import math
import random
from typing import TYPE_CHECKING

from waterfurnace.waterfurnace import (
@@ -42,13 +39,6 @@ if TYPE_CHECKING:

_LOGGER = logging.getLogger(__name__)

BACKFILL_BATCH_DAYS = 5
BACKFILL_LOOKBACK_DAYS = 395  # 13 Months
BACKFILL_GAP_THRESHOLD = timedelta(days=BACKFILL_BATCH_DAYS)
BACKFILL_DELAY_MIN_SECONDS = 5
BACKFILL_DELAY_MAX_SECONDS = 30
BACKFILL_MAX_EMPTY_DAYS = 15


@dataclass
class WaterFurnaceDeviceData:
@@ -125,7 +115,6 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
        self.client = client
        self.gwid = gwid
        self.statistic_id = f"{DOMAIN}:{gwid.lower()}_energy"
        self._backfill_task: asyncio.Task | None = None
        self._statistic_metadata = StatisticMetaData(
            has_sum=True,
            mean_type=StatisticMeanType.NONE,
@@ -155,43 +144,28 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
        if not last_stat:
            return None
        entry = last_stat[self.statistic_id][0]
        if "sum" not in entry or "start" not in entry or entry["sum"] is None:
        if entry["sum"] is None:
            return None

        return (entry["start"], entry["sum"])

    def _fetch_energy_data(
        self, start_date: str, end_date: str
    ) -> list[tuple[datetime, float]]:
        """Fetch energy data and return list of (timestamp, kWh) tuples.

        On auth failure, re-login once and retry the request.
        """
        """Fetch energy data and return list of (timestamp, kWh) tuples."""
        # Re-login to refresh the HTTP session token, which expires between
        # the 2-hour polling intervals.
        try:
            data = self.client.get_energy_data(
                start_date,
                end_date,
                frequency="1H",
                timezone_str=self.hass.config.time_zone,
            )
        except WFCredentialError:
            try:
                self.client.login()
            except WFCredentialError as err:
                raise UpdateFailed(
                    "Authentication failed during energy data fetch"
                ) from err
            try:
                data = self.client.get_energy_data(
                    start_date,
                    end_date,
                    frequency="1H",
                    timezone_str=self.hass.config.time_zone,
                )
            except WFCredentialError as err:
                raise UpdateFailed(
                    "Authentication failed during energy data fetch"
                ) from err
            self.client.login()
        except WFCredentialError as err:
            raise UpdateFailed(
                "Authentication failed during energy data fetch"
            ) from err
        data = self.client.get_energy_data(
            start_date,
            end_date,
            frequency="1H",
            timezone_str=self.hass.config.time_zone,
        )
        return [
            (reading.timestamp, reading.total_power)
            for reading in data
@@ -203,14 +177,10 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
        readings: list[tuple[datetime, float]],
        last_ts: float,
        last_sum: float,
        current_hour_ts: float | None = None,
        now: datetime,
    ) -> list[StatisticData]:
        """Build hourly statistics from readings, skipping already-recorded ones.

        When provided, current_hour_ts acts as an exclusive cutoff so readings at
        or after that timestamp are excluded, such as to skip the incomplete
        current hour during normal polling and backfill.
        """
        """Build hourly statistics from readings, skipping already-recorded ones."""
        current_hour_ts = now.replace(minute=0, second=0, microsecond=0).timestamp()
        statistics: list[StatisticData] = []
        seen_hours: set[float] = set()
        running_sum = last_sum
@@ -218,7 +188,7 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
            ts = timestamp.timestamp()
            if ts <= last_ts:
                continue
            if current_hour_ts is not None and ts >= current_hour_ts:
            if ts >= current_hour_ts:
                continue
            hour_ts = timestamp.replace(minute=0, second=0, microsecond=0).timestamp()
            if hour_ts in seen_hours:
@@ -234,140 +204,23 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
            )
        return statistics
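To make the exclusive cutoff described in the docstring above concrete, a small worked example with invented values and a partial-offset timezone: readings from the still-incomplete current hour are dropped, completed hours are kept.

from datetime import datetime, timedelta, timezone

ist = timezone(timedelta(hours=5, minutes=30))  # partial-offset tz, e.g. IST
now = datetime(2024, 6, 1, 12, 45, tzinfo=ist)
current_hour_ts = now.replace(minute=0, second=0, microsecond=0).timestamp()

readings = [
    (datetime(2024, 6, 1, 11, 0, tzinfo=ist), 1.2),   # complete hour -> kept
    (datetime(2024, 6, 1, 12, 15, tzinfo=ist), 0.4),  # current hour -> skipped
]
kept = [(ts, kwh) for ts, kwh in readings if ts.timestamp() < current_hour_ts]
assert len(kept) == 1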

    async def _async_backfill(
        self,
        start_dt: datetime,
        end_dt: datetime,
        initial_sum: float = 0.0,
        last_ts: float = -math.inf,
    ) -> None:
        """Backfill energy statistics by walking backwards in batches.

        Collects all readings into memory, then inserts them chronologically
        in a single pass. Stops early if no data is found for
        BACKFILL_MAX_EMPTY_DAYS consecutive days.
        """
        all_readings: list[tuple[datetime, float]] = []
        batch_end = end_dt
        local_tz = dt_util.DEFAULT_TIME_ZONE
        consecutive_empty_days = 0

        while batch_end > start_dt:
            batch_start = max(batch_end - timedelta(days=BACKFILL_BATCH_DAYS), start_dt)
            start_str = batch_start.astimezone(local_tz).strftime("%Y-%m-%d")
            end_str = batch_end.astimezone(local_tz).strftime("%Y-%m-%d")

            try:
                parsed = await self.hass.async_add_executor_job(
                    self._fetch_energy_data, start_str, end_str
                )
            except WFNoDataError:
                _LOGGER.debug(
                    "No energy data for %s to %s, skipping", start_str, end_str
                )
                consecutive_empty_days += BACKFILL_BATCH_DAYS
                if consecutive_empty_days >= BACKFILL_MAX_EMPTY_DAYS:
                    _LOGGER.debug(
                        "No data for %d consecutive days, stopping backfill",
                        consecutive_empty_days,
                    )
                    break
                batch_end = batch_start
                continue
            except (UpdateFailed, WFException):
                _LOGGER.exception("Error fetching energy data during backfill")
                break

            _LOGGER.debug(
                "Fetched %d readings for backfill batch %s to %s",
                len(parsed),
                start_str,
                end_str,
            )

            all_readings.extend(parsed)
            consecutive_empty_days = 0

            batch_end = batch_start
            if batch_end > start_dt:
                await asyncio.sleep(
                    random.uniform(
                        BACKFILL_DELAY_MIN_SECONDS, BACKFILL_DELAY_MAX_SECONDS
                    )
                )

        if all_readings:
            # Exclude the incomplete current hour. Use local timezone so
            # the hour boundary is correct for partial-offset timezones
            # (e.g. UTC+5:30).
            current_hour_ts = (
                end_dt.astimezone(local_tz)
                .replace(minute=0, second=0, microsecond=0)
                .timestamp()
            )
            statistics = self._build_statistics(
                all_readings, last_ts, initial_sum, current_hour_ts
            )
            if statistics:
                async_add_external_statistics(
                    self.hass, self._statistic_metadata, statistics
                )

    def _backfill_done_callback(self, task: asyncio.Task[None]) -> None:
        """Log any exception from a completed backfill task."""
        if task.cancelled():
            return
        if exc := task.exception():
            _LOGGER.error("Backfill task failed", exc_info=exc)

    async def async_wait_backfill(self) -> None:
        """Wait for any in-progress backfill task to complete."""
        if self._backfill_task:
            await self._backfill_task

    async def _async_update_data(self) -> None:
        """Fetch energy data and insert statistics.

        Handles three scenarios:
        1. No statistics exist → first-load backfill (background task)
        2. Last stat is older than gap threshold → gap backfill (background task)
        3. Last stat is recent → normal poll for recent data
        """
        if self._backfill_task and not self._backfill_task.done():
            _LOGGER.debug("Backfill already in progress, skipping update")
            return

        """Fetch energy data and insert statistics."""
        last = await self._async_get_last_stat()
        now = dt_util.utcnow()

        if last is None:
            # First load: backfill walking backwards from today
            start = now - timedelta(days=BACKFILL_LOOKBACK_DAYS)
            self._backfill_task = self.config_entry.async_create_background_task(
                self.hass,
                self._async_backfill(start, now),
                f"waterfurnace_backfill_{self.gwid}",
            )
            self._backfill_task.add_done_callback(self._backfill_done_callback)
            return
            _LOGGER.info("No prior statistics found, fetching recent energy data")
            last_ts = 0.0
            last_sum = 0.0
            start_dt = now - timedelta(days=1)
        else:
            last_ts, last_sum = last
            start_dt = dt_util.utc_from_timestamp(last_ts)
            _LOGGER.debug("Last stat: ts=%s, sum=%s", start_dt.isoformat(), last_sum)

        last_ts, last_sum = last
        last_dt = dt_util.utc_from_timestamp(last_ts)

        if now - last_dt > BACKFILL_GAP_THRESHOLD:
            # Large gap detected, backfill using batches
            self._backfill_task = self.config_entry.async_create_background_task(
                self.hass,
                self._async_backfill(last_dt, now, last_sum, last_ts),
                f"waterfurnace_backfill_{self.gwid}",
            )
            self._backfill_task.add_done_callback(self._backfill_done_callback)
            return

        # Normal poll: fetch recent data (up to BACKFILL_GAP_THRESHOLD) and insert any missing hours
        _LOGGER.debug("Last stat: ts=%s, sum=%s", last_dt.isoformat(), last_sum)
        local_tz = dt_util.DEFAULT_TIME_ZONE
        start_date = last_dt.astimezone(local_tz).strftime("%Y-%m-%d")
        start_date = start_dt.astimezone(local_tz).strftime("%Y-%m-%d")
        end_date = (now.astimezone(local_tz) + timedelta(days=1)).strftime("%Y-%m-%d")

        try:
@@ -386,16 +239,7 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):

        _LOGGER.debug("Fetched %s readings", len(readings))

        # Use local timezone so the hour boundary is correct for
        # partial-offset timezones (e.g. UTC+5:30).
        current_hour_ts = (
            now.astimezone(local_tz)
            .replace(minute=0, second=0, microsecond=0)
            .timestamp()
        )
        statistics = self._build_statistics(
            readings, last_ts, last_sum, current_hour_ts
        )
        statistics = self._build_statistics(readings, last_ts, last_sum, now)

        _LOGGER.debug("Built %s statistics to insert", len(statistics))

@@ -181,19 +181,7 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):

    def on_pipeline_event(self, event: PipelineEvent) -> None:
        """Set state based on pipeline stage."""
        if event.type == assist_pipeline.PipelineEventType.RUN_END:
            # Pipeline run is complete — always update bookkeeping state
            # even after a disconnect so follow-up reconnects don't retain
            # stale _is_pipeline_running / _pipeline_ended_event state.
            self._is_pipeline_running = False
            self._pipeline_ended_event.set()
            self.device.set_is_active(False)
            self._tts_stream_token = None
            self._is_tts_streaming = False

        if self._client is None:
            # Satellite disconnected, don't try to write to the client
            return
        assert self._client is not None

        if event.type == assist_pipeline.PipelineEventType.RUN_START:
            if event.data and (tts_output := event.data["tts_output"]):
@@ -202,6 +190,13 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
                # can start streaming TTS before the TTS_END event.
                self._tts_stream_token = tts_output["token"]
                self._is_tts_streaming = False
        elif event.type == assist_pipeline.PipelineEventType.RUN_END:
            # Pipeline run is complete
            self._is_pipeline_running = False
            self._pipeline_ended_event.set()
            self.device.set_is_active(False)
            self._tts_stream_token = None
            self._is_tts_streaming = False
        elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_START:
            self.config_entry.async_create_background_task(
                self.hass,
@@ -326,8 +321,7 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):

        Should block until the announcement is done playing.
        """
        if self._client is None:
            raise ConnectionError("Satellite is not connected")
        assert self._client is not None

        if self._ffmpeg_manager is None:
            self._ffmpeg_manager = ffmpeg.get_ffmpeg_manager(self.hass)
@@ -447,11 +441,6 @@
            # Stop any existing pipeline
            self._audio_queue.put_nowait(None)

            # Cancel any pipeline still running so its background
            # tasks and audio buffers can be released instead of
            # being orphaned across the reconnect.
            await self._cancel_running_pipeline()

            # Ensure sensor is off (before restart)
            self.device.set_is_active(False)
@@ -460,9 +449,6 @@
        finally:
            unregister_timer_handler()

            # Cancel any pipeline still running on final teardown.
            await self._cancel_running_pipeline()

            # Ensure sensor is off (before stop)
            self.device.set_is_active(False)
@@ -713,10 +699,10 @@

    async def _send_delayed_ping(self) -> None:
        """Send ping to satellite after a delay."""
        assert self._client is not None

        try:
            await asyncio.sleep(_PING_SEND_DELAY)
            if self._client is None:
                return
            await self._client.write_event(Ping().event())
        except ConnectionError:
            pass  # handled with timeout
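The change above replaces an up-front assert with a re-check after the sleep. A minimal illustrative sketch (toy types, not Wyoming code) of the general rule it follows: state guarded by an Optional reference must be re-checked after any await, because the connection can drop while the coroutine is suspended.

import asyncio


class Conn:
    async def send(self, msg: str) -> None:
        print("sent", msg)


class Satellite:
    def __init__(self) -> None:
        self.client: Conn | None = Conn()

    async def delayed_ping(self) -> None:
        await asyncio.sleep(0.01)  # client may disconnect during this await
        if self.client is None:  # re-check instead of asserting up front
            return
        await self.client.send("ping")


asyncio.run(Satellite().delayed_ping())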
@@ -742,10 +728,7 @@

    async def _stream_tts(self, tts_result: tts.ResultStream) -> None:
        """Stream TTS WAV audio to satellite in chunks."""
        client = self._client
        if client is None:
            # Satellite disconnected, cannot stream
            return
        assert self._client is not None

        if tts_result.extension != "wav":
            raise ValueError(
@@ -777,7 +760,7 @@
                sample_rate, sample_width, sample_channels, data_chunk = (
                    audio_info
                )
                await client.write_event(
                await self._client.write_event(
                    AudioStart(
                        rate=sample_rate,
                        width=sample_width,
@@ -811,12 +794,12 @@
                        timestamp=timestamp,
                    )

                    await client.write_event(audio_chunk.event())
                    await self._client.write_event(audio_chunk.event())
                    timestamp += audio_chunk.milliseconds
                    total_seconds += audio_chunk.seconds
                    data_chunk_idx += _AUDIO_CHUNK_BYTES

                await client.write_event(AudioStop(timestamp=timestamp).event())
                await self._client.write_event(AudioStop(timestamp=timestamp).event())
                _LOGGER.debug("TTS streaming complete")
        finally:
            send_duration = time.monotonic() - start_time
@@ -857,9 +840,7 @@
        self, event_type: intent.TimerEventType, timer: intent.TimerInfo
    ) -> None:
        """Forward timer events to satellite."""
        if self._client is None:
            # Satellite disconnected, drop timer event
            return
        assert self._client is not None

        _LOGGER.debug("Timer event: type=%s, info=%s", event_type, timer)
        event: Event | None = None
@@ -23,7 +23,7 @@
    "universal_silabs_flasher",
    "serialx"
  ],
  "requirements": ["zha==1.1.2", "serialx==1.2.2"],
  "requirements": ["zha==1.1.2", "serialx==1.1.1"],
  "usb": [
    {
      "description": "*2652*",
@@ -831,8 +831,8 @@ async def entity_service_call(
    if len(entities) == 1:
        # Single entity case avoids creating task
        entity = entities[0]
        single_response = await entity.async_request_call(
            _handle_entity_call(hass, entity, func, data, call.context)
        single_response = await _handle_entity_call(
            hass, entity, func, data, call.context
        )
        if entity.should_poll:
            # Context expires if the turn on commands took a long time.
@@ -50,7 +50,7 @@ openai==2.21.0
orjson==3.11.7
packaging>=23.1
paho-mqtt==2.1.0
Pillow==12.2.0
Pillow==12.1.1
propcache==0.4.1
psutil-home-assistant==0.0.1
PyJWT==2.10.1
@@ -58,7 +58,7 @@ dependencies = [
    "PyJWT==2.10.1",
    # PyJWT has loose dependency. We want the latest one.
    "cryptography==46.0.7",
    "Pillow==12.2.0",
    "Pillow==12.1.1",
    "propcache==0.4.1",
    "pyOpenSSL==26.0.0",
    "orjson==3.11.7",
requirements.txt (generated)
@@ -36,7 +36,7 @@ lru-dict==1.3.0
mutagen==1.47.0
orjson==3.11.7
packaging>=23.1
Pillow==12.2.0
Pillow==12.1.1
propcache==0.4.1
psutil-home-assistant==0.0.1
PyJWT==2.10.1
requirements_all.txt (generated)
@@ -38,7 +38,7 @@ PSNAWP==3.0.3
# homeassistant.components.qrcode
# homeassistant.components.seven_segments
# homeassistant.components.sighthound
Pillow==12.2.0
Pillow==12.1.1

# homeassistant.components.plex
PlexAPI==4.15.16
@@ -1247,7 +1247,7 @@ homekit-audio-proxy==1.2.1
homelink-integration-api==0.0.1

# homeassistant.components.homematicip_cloud
homematicip==2.8.0
homematicip==2.7.0

# homeassistant.components.homevolt
homevolt==0.5.0
@@ -1928,7 +1928,7 @@ pyRFXtrx==0.31.1
pySDCP==1

# homeassistant.components.tibber
pyTibber==0.37.1
pyTibber==0.37.0

# homeassistant.components.dlink
pyW215==0.8.0
@@ -2092,7 +2092,7 @@ pyegps==0.2.5
pyemoncms==0.1.3

# homeassistant.components.enphase_envoy
pyenphase==2.4.8
pyenphase==2.4.6

# homeassistant.components.envisalink
pyenvisalink==4.7
@@ -2930,7 +2930,7 @@ sentry-sdk==2.48.0

# homeassistant.components.homeassistant_hardware
# homeassistant.components.zha
serialx==1.2.2
serialx==1.1.1

# homeassistant.components.sfr_box
sfrbox-api==0.1.1