Compare commits


3 Commits

Author           SHA1        Message                                      Date
Erik             5b3953d78d  Adjust language + sort                       2026-01-22 08:09:06 +01:00
Erik Montnemery  5f843dd832  Merge branch 'dev' into add_lock_conditions  2026-01-22 08:08:06 +01:00
Erik             27a7b1a1a0  Add lock conditions                          2026-01-21 16:39:01 +01:00
73 changed files with 398 additions and 806 deletions

View File

@@ -33,7 +33,7 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -122,7 +122,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -477,7 +477,7 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

View File

@@ -297,7 +297,7 @@ jobs:
- &setup-python-matrix
name: Set up Python ${{ matrix.python-version }}
id: python
uses: &actions-setup-python actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true

View File

@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

View File

@@ -35,7 +35,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true

View File

@@ -51,7 +51,7 @@ DEFAULT_NAME_HP = "HomePod"
BACKOFF_TIME_LOWER_LIMIT = 15 # seconds
BACKOFF_TIME_UPPER_LIMIT = 300 # Five minutes
PLATFORMS = [Platform.BINARY_SENSOR, Platform.MEDIA_PLAYER, Platform.REMOTE]
PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
AUTH_EXCEPTIONS = (
exceptions.AuthenticationError,

View File

@@ -1,63 +0,0 @@
"""Binary sensor support for Apple TV."""
from __future__ import annotations
from pyatv.const import KeyboardFocusState
from pyatv.interface import AppleTV, KeyboardListener
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AppleTvConfigEntry
from .entity import AppleTVEntity
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AppleTvConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Load Apple TV binary sensor based on a config entry."""
# apple_tv config entries always have a unique id
assert config_entry.unique_id is not None
name: str = config_entry.data[CONF_NAME]
manager = config_entry.runtime_data
async_add_entities([AppleTVKeyboardFocused(name, config_entry.unique_id, manager)])
class AppleTVKeyboardFocused(AppleTVEntity, BinarySensorEntity, KeyboardListener):
"""Binary sensor for Text input focused."""
_attr_translation_key = "keyboard_focused"
_attr_available = True
@callback
def async_device_connected(self, atv: AppleTV) -> None:
"""Handle when connection is made to device."""
self._attr_available = True
# Listen to keyboard updates
atv.keyboard.listener = self
# Set initial state based on current focus state
self._update_state(atv.keyboard.text_focus_state == KeyboardFocusState.Focused)
@callback
def async_device_disconnected(self) -> None:
"""Handle when connection was lost to device."""
self._attr_available = False
self._update_state(False)
def focusstate_update(
self, old_state: KeyboardFocusState, new_state: KeyboardFocusState
) -> None:
"""Update keyboard state when it changes.
This is a callback function from pyatv.interface.KeyboardListener.
"""
self._update_state(new_state == KeyboardFocusState.Focused)
def _update_state(self, new_state: bool) -> None:
"""Update and report."""
self._attr_is_on = new_state
self.async_write_ha_state()

View File

@@ -18,6 +18,7 @@ class AppleTVEntity(Entity):
_attr_should_poll = False
_attr_has_entity_name = True
_attr_name = None
atv: AppleTVInterface | None = None
def __init__(self, name: str, identifier: str, manager: AppleTVManager) -> None:

View File

@@ -1,12 +0,0 @@
{
"entity": {
"binary_sensor": {
"keyboard_focused": {
"default": "mdi:keyboard",
"state": {
"off": "mdi:keyboard-off"
}
}
}
}
}

View File

@@ -115,7 +115,6 @@ class AppleTvMediaPlayer(
"""Representation of an Apple TV media player."""
_attr_supported_features = SUPPORT_APPLE_TV
_attr_name = None
def __init__(self, name: str, identifier: str, manager: AppleTVManager) -> None:
"""Initialize the Apple TV media player."""

View File

@@ -51,8 +51,6 @@ async def async_setup_entry(
class AppleTVRemote(AppleTVEntity, RemoteEntity):
"""Device that sends commands to an Apple TV."""
_attr_name = None
@property
def is_on(self) -> bool:
"""Return true if device is on."""

View File

@@ -62,13 +62,6 @@
}
}
},
"entity": {
"binary_sensor": {
"keyboard_focused": {
"name": "Keyboard focus"
}
}
},
"options": {
"step": {
"init": {

View File

@@ -125,9 +125,9 @@ NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG = "new_triggers_conditions"
_EXPERIMENTAL_CONDITION_PLATFORMS = {
"alarm_control_panel",
"assist_satellite",
"device_tracker",
"fan",
"light",
"lock",
"siren",
}

View File

@@ -1,17 +0,0 @@
"""Provides conditions for device trackers."""
from homeassistant.const import STATE_HOME, STATE_NOT_HOME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition
from .const import DOMAIN
CONDITIONS: dict[str, type[Condition]] = {
"is_home": make_entity_state_condition(DOMAIN, STATE_HOME),
"is_not_home": make_entity_state_condition(DOMAIN, STATE_NOT_HOME),
}
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
"""Return the conditions for device trackers."""
return CONDITIONS

View File

@@ -1,12 +1,4 @@
{
"conditions": {
"is_home": {
"condition": "mdi:account"
},
"is_not_home": {
"condition": "mdi:account-arrow-right"
}
},
"entity_component": {
"_": {
"default": "mdi:account",

View File

@@ -1,32 +1,8 @@
{
"common": {
"condition_behavior_description": "How the state should match on the targeted device trackers.",
"condition_behavior_name": "Behavior",
"trigger_behavior_description": "The behavior of the targeted device trackers to trigger on.",
"trigger_behavior_name": "Behavior"
},
"conditions": {
"is_home": {
"description": "Tests if one or more device trackers are home.",
"fields": {
"behavior": {
"description": "[%key:component::device_tracker::common::condition_behavior_description%]",
"name": "[%key:component::device_tracker::common::condition_behavior_name%]"
}
},
"name": "Device tracker is home"
},
"is_not_home": {
"description": "Tests if one or more device trackers are not home.",
"fields": {
"behavior": {
"description": "[%key:component::device_tracker::common::condition_behavior_description%]",
"name": "[%key:component::device_tracker::common::condition_behavior_name%]"
}
},
"name": "Device tracker is not home"
}
},
"device_automation": {
"condition_type": {
"is_home": "{entity_name} is home",
@@ -73,12 +49,6 @@
}
},
"selector": {
"condition_behavior": {
"options": {
"all": "All",
"any": "Any"
}
},
"trigger_behavior": {
"options": {
"any": "Any",

View File

@@ -7,6 +7,9 @@
"benzene": {
"default": "mdi:molecule"
},
"nitrogen_monoxide": {
"default": "mdi:molecule"
},
"non_methane_hydrocarbons": {
"default": "mdi:molecule"
}

View File

@@ -138,8 +138,8 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
),
AirQualitySensorEntityDescription(
key="no",
translation_key="nitrogen_monoxide",
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.NITROGEN_MONOXIDE,
native_unit_of_measurement_fn=lambda x: x.pollutants.no.concentration.units,
value_fn=lambda x: x.pollutants.no.concentration.value,
exists_fn=lambda x: "no" in {p.code for p in x.pollutants},

View File

@@ -205,6 +205,9 @@
"so2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
}
},
"nitrogen_monoxide": {
"name": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]"
},
"non_methane_hydrocarbons": {
"name": "Non-methane hydrocarbons"
},

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from dataclasses import dataclass
import logging
from xml.etree.ElementTree import ParseError
from pyhik.constants import SENSOR_MAP
from pyhik.hikvision import HikCamera
@@ -89,12 +88,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
def fetch_and_inject_nvr_events() -> None:
"""Fetch and inject NVR events in a single executor job."""
try:
nvr_events = camera.get_event_triggers(nvr_notification_methods)
except (requests.exceptions.RequestException, ParseError) as err:
_LOGGER.warning("Unable to fetch event triggers from %s: %s", host, err)
return
nvr_events = camera.get_event_triggers(nvr_notification_methods)
_LOGGER.debug("NVR events fetched with extended methods: %s", nvr_events)
if nvr_events:
# Map raw event type names to friendly names using SENSOR_MAP
@@ -107,12 +101,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
mapped_events[friendly_name] = list(channels)
_LOGGER.debug("Mapped NVR events: %s", mapped_events)
camera.inject_events(mapped_events)
else:
_LOGGER.debug(
"No event triggers returned from %s. "
"Ensure events are configured on the device",
host,
)
await hass.async_add_executor_job(fetch_and_inject_nvr_events)

View File

@@ -27,6 +27,7 @@ from homeassistant.const import (
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
@@ -35,7 +36,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import HikvisionConfigEntry
from .const import DEFAULT_PORT, DOMAIN
from .entity import HikvisionEntity
CONF_IGNORED = "ignored"
@@ -150,12 +150,7 @@ async def async_setup_entry(
sensors = camera.current_event_states
if sensors is None or not sensors:
_LOGGER.warning(
"Hikvision %s %s has no sensors available. "
"Ensure event detection is enabled and configured on the device",
data.device_type,
data.device_name,
)
_LOGGER.warning("Hikvision device has no sensors available")
return
async_add_entities(
@@ -169,9 +164,10 @@ async def async_setup_entry(
)
class HikvisionBinarySensor(HikvisionEntity, BinarySensorEntity):
class HikvisionBinarySensor(BinarySensorEntity):
"""Representation of a Hikvision binary sensor."""
_attr_has_entity_name = True
_attr_should_poll = False
def __init__(
@@ -181,14 +177,38 @@ class HikvisionBinarySensor(HikvisionEntity, BinarySensorEntity):
channel: int,
) -> None:
"""Initialize the binary sensor."""
super().__init__(entry, channel)
self._data = entry.runtime_data
self._camera = self._data.camera
self._sensor_type = sensor_type
self._channel = channel
# Build unique ID (includes sensor_type for uniqueness per sensor)
# Build unique ID
self._attr_unique_id = f"{self._data.device_id}_{sensor_type}_{channel}"
# Set entity name
self._attr_name = sensor_type
# Device info for device registry
if self._data.device_type == "NVR":
# NVR channels get their own device linked to the NVR via via_device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
via_device=(DOMAIN, self._data.device_id),
translation_key="nvr_channel",
translation_placeholders={
"device_name": self._data.device_name,
"channel_number": str(channel),
},
manufacturer="Hikvision",
model="NVR Channel",
)
self._attr_name = sensor_type
else:
# Single camera device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)
self._attr_name = sensor_type
# Set device class
self._attr_device_class = DEVICE_CLASS_MAP.get(sensor_type)

View File

@@ -5,10 +5,11 @@ from __future__ import annotations
from homeassistant.components.camera import Camera, CameraEntityFeature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HikvisionConfigEntry
from .entity import HikvisionEntity
from .const import DOMAIN
PARALLEL_UPDATES = 0
@@ -34,9 +35,10 @@ async def async_setup_entry(
async_add_entities(entities)
class HikvisionCamera(HikvisionEntity, Camera):
class HikvisionCamera(Camera):
"""Representation of a Hikvision camera."""
_attr_has_entity_name = True
_attr_name = None
_attr_supported_features = CameraEntityFeature.STREAM
@@ -46,11 +48,37 @@ class HikvisionCamera(HikvisionEntity, Camera):
channel: int,
) -> None:
"""Initialize the camera."""
super().__init__(entry, channel)
super().__init__()
self._data = entry.runtime_data
self._channel = channel
self._camera = self._data.camera
# Build unique ID (unique per platform per integration)
self._attr_unique_id = f"{self._data.device_id}_{channel}"
# Device info for device registry
if self._data.device_type == "NVR":
# NVR channels get their own device linked to the NVR via via_device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
via_device=(DOMAIN, self._data.device_id),
translation_key="nvr_channel",
translation_placeholders={
"device_name": self._data.device_name,
"channel_number": str(channel),
},
manufacturer="Hikvision",
model="NVR Channel",
)
else:
# Single camera device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)
async def async_camera_image(
self, width: int | None = None, height: int | None = None
) -> bytes | None:

View File

@@ -1,49 +0,0 @@
"""Base entity for Hikvision integration."""
from __future__ import annotations
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from . import HikvisionConfigEntry, HikvisionData
from .const import DOMAIN
class HikvisionEntity(Entity):
"""Base class for Hikvision entities."""
_attr_has_entity_name = True
def __init__(
self,
entry: HikvisionConfigEntry,
channel: int,
) -> None:
"""Initialize the entity."""
super().__init__()
self._data: HikvisionData = entry.runtime_data
self._camera = self._data.camera
self._channel = channel
# Device info for device registry
if self._data.device_type == "NVR":
# NVR channels get their own device linked to the NVR via via_device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
via_device=(DOMAIN, self._data.device_id),
translation_key="nvr_channel",
translation_placeholders={
"device_name": self._data.device_name,
"channel_number": str(channel),
},
manufacturer="Hikvision",
model="NVR Channel",
)
else:
# Single camera device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)

View File

@@ -108,6 +108,7 @@ _DEFAULT_BIND = ["0.0.0.0", "::"] if _HAS_IPV6 else ["0.0.0.0"]
HTTP_SCHEMA: Final = vol.All(
cv.deprecated(CONF_BASE_URL),
cv.deprecated(CONF_SERVER_HOST), # Deprecated in HA Core 2025.12
vol.Schema(
{
vol.Optional(CONF_SERVER_HOST): vol.All(
@@ -208,15 +209,20 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if conf is None:
conf = cast(ConfData, HTTP_SCHEMA({}))
if CONF_SERVER_HOST in conf and is_hassio(hass):
issue_id = "server_host_deprecated_hassio"
if CONF_SERVER_HOST in conf:
if is_hassio(hass):
issue_id = "server_host_deprecated_hassio"
severity = ir.IssueSeverity.ERROR
else:
issue_id = "server_host_deprecated"
severity = ir.IssueSeverity.WARNING
ir.async_create_issue(
hass,
DOMAIN,
issue_id,
breaks_in_ha_version="2026.6.0",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
severity=severity,
translation_key=issue_id,
)
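
Note: as a quick illustration of the branching shown above (not the http integration's actual code), the repair issue id and severity depend only on whether a `server_host` is configured and whether Home Assistant runs under the Supervisor:

```python
# Illustrative sketch only: mirrors the issue-id/severity selection in the diff above.
def server_host_issue(server_host_configured: bool, hassio: bool) -> tuple[str, str] | None:
    """Return (issue_id, severity) for a deprecated server_host option, or None."""
    if not server_host_configured:
        return None
    if hassio:
        # Supervisor installs: the option can break Core <-> Supervisor communication.
        return ("server_host_deprecated_hassio", "error")
    return ("server_host_deprecated", "warning")


print(server_host_issue(True, hassio=False))  # ('server_host_deprecated', 'warning')
print(server_host_issue(True, hassio=True))   # ('server_host_deprecated_hassio', 'error')
```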

View File

@@ -1,5 +1,9 @@
{
"issues": {
"server_host_deprecated": {
"description": "The `server_host` configuration option in the HTTP integration is deprecated and will be removed.\n\nIf you are using this option to bind Home Assistant to specific network interfaces, please remove it from your configuration. Home Assistant will automatically bind to all available interfaces by default.\n\nIf you have specific networking requirements, consider using firewall rules or other network configuration to control access to Home Assistant.",
"title": "The `server_host` HTTP configuration option is deprecated"
},
"server_host_deprecated_hassio": {
"description": "The deprecated `server_host` configuration option in the HTTP integration is prone to break the communication between Home Assistant Core and Supervisor, and will be removed.\n\nIf you are using this option to bind Home Assistant to specific network interfaces, please remove it from your configuration. Home Assistant will automatically bind to all available interfaces by default.\n\nIf you have specific networking requirements, consider using firewall rules or other network configuration to control access to Home Assistant.",
"title": "The `server_host` HTTP configuration may break Home Assistant Core - Supervisor communication"

View File

@@ -19,7 +19,7 @@
"loggers": ["pyinsteon", "pypubsub"],
"requirements": [
"pyinsteon==1.6.4",
"insteon-frontend-home-assistant==0.6.1"
"insteon-frontend-home-assistant==0.6.0"
],
"single_config_entry": true,
"usb": [

View File

@@ -0,0 +1,18 @@
"""Provides conditions for locks."""
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition
from .const import DOMAIN, LockState
CONDITIONS: dict[str, type[Condition]] = {
"is_jammed": make_entity_state_condition(DOMAIN, LockState.JAMMED),
"is_locked": make_entity_state_condition(DOMAIN, LockState.LOCKED),
"is_open": make_entity_state_condition(DOMAIN, LockState.OPEN),
"is_unlocked": make_entity_state_condition(DOMAIN, LockState.UNLOCKED),
}
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
"""Return the conditions for locks."""
return CONDITIONS
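
Note: the new lock conditions reuse the same `make_entity_state_condition` factory pattern as the device_tracker conditions elsewhere in this diff. As a rough mental model only (not the real Home Assistant helper), a state condition with the "any"/"all" behavior option from the `condition_behavior` selector could look like this:

```python
# Conceptual stand-in for what a factory like make_entity_state_condition produces.
# The "any"/"all" behavior option follows the condition_behavior selector strings in
# this diff; the real helper is more involved.
from dataclasses import dataclass


@dataclass
class EntityStateCondition:
    domain: str
    expected_state: str
    behavior: str = "any"  # "any" or "all"

    def evaluate(self, entity_states: dict[str, str]) -> bool:
        """Check the targeted entities' states against the expected state."""
        matches = [state == self.expected_state for state in entity_states.values()]
        if not matches:
            return False
        return all(matches) if self.behavior == "all" else any(matches)


# Example: lock.is_locked over two targeted locks with "all" behavior.
is_locked = EntityStateCondition("lock", "locked", behavior="all")
print(is_locked.evaluate({"lock.front_door": "locked", "lock.back_door": "unlocked"}))  # False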

View File

@@ -1,7 +1,7 @@
.condition_common: &condition_common
target:
entity:
domain: device_tracker
domain: lock
fields:
behavior:
required: true
@@ -13,5 +13,7 @@
- all
- any
is_home: *condition_common
is_not_home: *condition_common
is_jammed: *condition_common
is_locked: *condition_common
is_open: *condition_common
is_unlocked: *condition_common

View File

@@ -1,4 +1,18 @@
{
"conditions": {
"is_jammed": {
"condition": "mdi:lock-alert"
},
"is_locked": {
"condition": "mdi:lock"
},
"is_open": {
"condition": "mdi:lock-open-variant"
},
"is_unlocked": {
"condition": "mdi:lock-open-variant"
}
},
"entity_component": {
"_": {
"default": "mdi:lock",

View File

@@ -1,8 +1,52 @@
{
"common": {
"condition_behavior_description": "How the state should match on the targeted locks.",
"condition_behavior_name": "Behavior",
"trigger_behavior_description": "The behavior of the targeted locks to trigger on.",
"trigger_behavior_name": "Behavior"
},
"conditions": {
"is_jammed": {
"description": "Tests if one or more locks are jammed.",
"fields": {
"behavior": {
"description": "[%key:component::lock::common::condition_behavior_description%]",
"name": "[%key:component::lock::common::condition_behavior_name%]"
}
},
"name": "Lock is jammed"
},
"is_locked": {
"description": "Tests if one or more locks are locked.",
"fields": {
"behavior": {
"description": "[%key:component::lock::common::condition_behavior_description%]",
"name": "[%key:component::lock::common::condition_behavior_name%]"
}
},
"name": "Lock is locked"
},
"is_open": {
"description": "Tests if one or more locks are open.",
"fields": {
"behavior": {
"description": "[%key:component::lock::common::condition_behavior_description%]",
"name": "[%key:component::lock::common::condition_behavior_name%]"
}
},
"name": "Lock is open"
},
"is_unlocked": {
"description": "Tests if one or more locks are unlocked.",
"fields": {
"behavior": {
"description": "[%key:component::lock::common::condition_behavior_description%]",
"name": "[%key:component::lock::common::condition_behavior_name%]"
}
},
"name": "Lock is unlocked"
}
},
"device_automation": {
"action_type": {
"lock": "Lock {entity_name}",
@@ -55,6 +99,12 @@
}
},
"selector": {
"condition_behavior": {
"options": {
"all": "All",
"any": "Any"
}
},
"trigger_behavior": {
"options": {
"any": "Any",

View File

@@ -10,6 +10,6 @@
"iot_class": "local_push",
"loggers": ["music_assistant"],
"quality_scale": "bronze",
"requirements": ["music-assistant-client==1.3.3"],
"requirements": ["music-assistant-client==1.3.2"],
"zeroconf": ["_mass._tcp.local."]
}

View File

@@ -253,7 +253,7 @@ class NumberDeviceClass(StrEnum):
NITROGEN_MONOXIDE = "nitrogen_monoxide"
"""Amount of NO.
Unit of measurement: `ppb` (parts per billion), `μg/m³`
Unit of measurement: `μg/m³`
"""
NITROUS_OXIDE = "nitrous_oxide"
@@ -521,10 +521,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.NITROGEN_MONOXIDE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.NITROGEN_MONOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.OZONE: {
CONCENTRATION_PARTS_PER_BILLION,

View File

@@ -9,5 +9,5 @@
"iot_class": "cloud_polling",
"loggers": ["opower"],
"quality_scale": "bronze",
"requirements": ["opower==0.16.5"]
"requirements": ["opower==0.16.4"]
}

View File

@@ -60,7 +60,6 @@ from homeassistant.util.unit_conversion import (
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
@@ -229,7 +228,6 @@ _PRIMARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
_SECONDARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
CarbonMonoxideConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
SulphurDioxideConcentrationConverter,
TemperatureDeltaConverter,

View File

@@ -34,7 +34,6 @@ from homeassistant.util.unit_conversion import (
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
@@ -95,9 +94,6 @@ UNIT_SCHEMA = vol.Schema(
vol.Optional("nitrogen_dioxide"): vol.In(
NitrogenDioxideConcentrationConverter.VALID_UNITS
),
vol.Optional("nitrogen_monoxide"): vol.In(
NitrogenMonoxideConcentrationConverter.VALID_UNITS
),
vol.Optional("ozone"): vol.In(OzoneConcentrationConverter.VALID_UNITS),
vol.Optional("power"): vol.In(PowerConverter.VALID_UNITS),
vol.Optional("pressure"): vol.In(PressureConverter.VALID_UNITS),

View File

@@ -64,7 +64,6 @@ from homeassistant.util.unit_conversion import (
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
@@ -292,7 +291,7 @@ class SensorDeviceClass(StrEnum):
NITROGEN_MONOXIDE = "nitrogen_monoxide"
"""Amount of NO.
Unit of measurement: `ppb` (parts per billion), `μg/m³`
Unit of measurement: `μg/m³`
"""
NITROUS_OXIDE = "nitrous_oxide"
@@ -567,7 +566,6 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] =
SensorDeviceClass.ENERGY_STORAGE: EnergyConverter,
SensorDeviceClass.GAS: VolumeConverter,
SensorDeviceClass.NITROGEN_DIOXIDE: NitrogenDioxideConcentrationConverter,
SensorDeviceClass.NITROGEN_MONOXIDE: NitrogenMonoxideConcentrationConverter,
SensorDeviceClass.OZONE: OzoneConcentrationConverter,
SensorDeviceClass.POWER: PowerConverter,
SensorDeviceClass.POWER_FACTOR: UnitlessRatioConverter,
@@ -641,10 +639,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.NITROGEN_MONOXIDE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.NITROGEN_MONOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.OZONE: {
CONCENTRATION_PARTS_PER_BILLION,

View File

@@ -355,3 +355,16 @@ class TriggerAlarmControlPanelEntity(TriggerEntity, AbstractTemplateAlarmControl
"""Restore last state."""
await super().async_added_to_hass()
await self._async_handle_restored_state()
@callback
def _handle_coordinator_update(self) -> None:
"""Handle update of the data."""
self._process_data()
if not self.available:
self.async_write_ha_state()
return
if self.handle_rendered_result(CONF_STATE):
self.async_set_context(self.coordinator.data["context"])
self.async_write_ha_state()

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from abc import abstractmethod
from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import partial
@@ -184,32 +183,8 @@ class AbstractTemplateBinarySensor(
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._template: template.Template = config[CONF_STATE]
self._delay_on = None
self._delay_off = None
self._delay_cancel: CALLBACK_TYPE | None = None
self.setup_state_template(
CONF_STATE,
"_attr_is_on",
on_update=self._update_state,
)
self._delay_on = None
try:
self._delay_on = cv.positive_time_period(config.get(CONF_DELAY_ON))
except vol.Invalid:
self.setup_template(CONF_DELAY_ON, "_delay_on", cv.positive_time_period)
self._delay_off = None
try:
self._delay_off = cv.positive_time_period(config.get(CONF_DELAY_OFF))
except vol.Invalid:
self.setup_template(CONF_DELAY_OFF, "_delay_off", cv.positive_time_period)
@callback
@abstractmethod
def _update_state(self, result: Any) -> None:
"""Update the state."""
class StateBinarySensorEntity(TemplateEntity, AbstractTemplateBinarySensor):
"""A virtual binary sensor that triggers from another sensor."""
@@ -225,15 +200,17 @@ class StateBinarySensorEntity(TemplateEntity, AbstractTemplateBinarySensor):
"""Initialize the Template binary sensor."""
TemplateEntity.__init__(self, hass, config, unique_id)
AbstractTemplateBinarySensor.__init__(self, config)
self._delay_on = None
self._delay_on_template = config.get(CONF_DELAY_ON)
self._delay_off = None
self._delay_off_template = config.get(CONF_DELAY_OFF)
async def async_added_to_hass(self) -> None:
"""Restore state."""
if (
(
CONF_DELAY_ON in self._templates
or CONF_DELAY_OFF in self._templates
or self._delay_on is not None
or self._delay_off is not None
self._delay_on_template is not None
or self._delay_off_template is not None
)
and (last_state := await self.async_get_last_state()) is not None
and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE)
@@ -241,6 +218,29 @@ class StateBinarySensorEntity(TemplateEntity, AbstractTemplateBinarySensor):
self._attr_is_on = last_state.state == STATE_ON
await super().async_added_to_hass()
@callback
def _async_setup_templates(self) -> None:
"""Set up templates."""
self.add_template_attribute("_state", self._template, None, self._update_state)
if self._delay_on_template is not None:
try:
self._delay_on = cv.positive_time_period(self._delay_on_template)
except vol.Invalid:
self.add_template_attribute(
"_delay_on", self._delay_on_template, cv.positive_time_period
)
if self._delay_off_template is not None:
try:
self._delay_off = cv.positive_time_period(self._delay_off_template)
except vol.Invalid:
self.add_template_attribute(
"_delay_off", self._delay_off_template, cv.positive_time_period
)
super()._async_setup_templates()
@callback
def _update_state(self, result):
super()._update_state(result)
@@ -291,11 +291,15 @@ class TriggerBinarySensorEntity(TriggerEntity, AbstractTemplateBinarySensor):
TriggerEntity.__init__(self, hass, coordinator, config)
AbstractTemplateBinarySensor.__init__(self, config)
for key in (CONF_STATE, CONF_DELAY_ON, CONF_DELAY_OFF, CONF_AUTO_OFF):
if isinstance(config.get(key), template.Template):
self._to_render_simple.append(key)
self._parse_result.add(key)
self._last_delay_from: bool | None = None
self._last_delay_to: bool | None = None
self._auto_off_cancel: CALLBACK_TYPE | None = None
self._auto_off_time: datetime | None = None
self.setup_template(CONF_AUTO_OFF, "_auto_off_time", cv.positive_time_period)
async def async_added_to_hass(self) -> None:
"""Restore last state."""
@@ -325,26 +329,17 @@ class TriggerBinarySensorEntity(TriggerEntity, AbstractTemplateBinarySensor):
self._set_auto_off(auto_off_time)
@callback
def _cancel_delays(self):
if self._delay_cancel:
self._delay_cancel()
self._delay_cancel = None
def _handle_coordinator_update(self) -> None:
"""Handle update of the data."""
self._process_data()
if self._auto_off_cancel:
self._auto_off_cancel()
self._auto_off_cancel = None
self._auto_off_time = None
@callback
def _update_state(self, result):
raw = self._rendered.get(CONF_STATE)
state: bool | None = None
if result is not None:
state = template.result_as_boolean(result)
if raw is not None:
state = template.result_as_boolean(raw)
if state:
delay = self._rendered.get(CONF_DELAY_ON) or self._delay_on
else:
delay = self._rendered.get(CONF_DELAY_OFF) or self._delay_off
key = CONF_DELAY_ON if state else CONF_DELAY_OFF
delay = self._rendered.get(key) or self._config.get(key)
if (
self._delay_cancel
@@ -354,7 +349,18 @@ class TriggerBinarySensorEntity(TriggerEntity, AbstractTemplateBinarySensor):
):
return
self._cancel_delays()
if self._delay_cancel:
self._delay_cancel()
self._delay_cancel = None
if self._auto_off_cancel:
self._auto_off_cancel()
self._auto_off_cancel = None
self._auto_off_time = None
if not self.available:
self.async_write_ha_state()
return
# state without delay.
if self._attr_is_on == state or delay is None:
@@ -365,7 +371,6 @@ class TriggerBinarySensorEntity(TriggerEntity, AbstractTemplateBinarySensor):
try:
delay = cv.positive_time_period(delay)
except vol.Invalid as err:
key = CONF_DELAY_ON if state else CONF_DELAY_OFF
logging.getLogger(__name__).warning(
"Error rendering %s template: %s", key, err
)
@@ -407,14 +412,6 @@ class TriggerBinarySensorEntity(TriggerEntity, AbstractTemplateBinarySensor):
auto_off_time = dt_util.utcnow() + auto_off_delay
self._set_auto_off(auto_off_time)
def _render_availability_template(self, variables):
available = super()._render_availability_template(variables)
if not available:
# Cancel any delay_on, delay_off, or auto_off when
# the entity goes unavailable
self._cancel_delays()
return available
def _set_auto_off(self, auto_off_time: datetime) -> None:
@callback
def _auto_off(_):

View File

@@ -500,6 +500,7 @@ class TriggerCoverEntity(TriggerEntity, AbstractTemplateCover):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -96,30 +96,6 @@ class AbstractTemplateEntity(Entity):
) -> None:
"""Set up a template that manages the main state of the entity."""
@abstractmethod
def setup_template(
self,
option: str,
attribute: str,
validator: Callable[[Any], Any] | None = None,
on_update: Callable[[Any], None] | None = None,
) -> None:
"""Set up a template that manages any property or attribute of the entity.
Parameters
----------
option
The configuration key provided by ConfigFlow or the yaml option
attribute
The name of the attribute to link to. This attribute must exist
unless a custom on_update method is supplied.
validator:
Optional function that validates the rendered result.
on_update:
Called to store the template result rather than storing it
the supplied attribute. Passed the result of the validator.
"""
def add_template(
self,
option: str,
@@ -133,11 +109,7 @@ class AbstractTemplateEntity(Entity):
if (template := self._config.get(option)) and isinstance(template, Template):
if add_if_static or (not template.is_static):
self._templates[option] = EntityTemplate(
attribute,
template,
validator,
on_update,
none_on_template_error,
attribute, template, validator, on_update, none_on_template_error
)
return template

View File

@@ -224,6 +224,7 @@ class TriggerEventEntity(TriggerEntity, AbstractTemplateEvent, RestoreEntity):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
for key, updater in (

View File

@@ -552,6 +552,7 @@ class TriggerFanEntity(TriggerEntity, AbstractTemplateFan):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -180,4 +180,3 @@ class TriggerImageEntity(TriggerEntity, AbstractTemplateImage):
"""Process new data."""
super()._process_data()
self._handle_state(self._rendered.get(CONF_URL))
self.async_write_ha_state()

View File

@@ -1123,6 +1123,7 @@ class TriggerLightEntity(TriggerEntity, AbstractTemplateLight):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -377,6 +377,7 @@ class TriggerLockEntity(TriggerEntity, AbstractTemplateLock):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -236,6 +236,7 @@ class TriggerNumberEntity(TriggerEntity, AbstractTemplateNumber):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -209,6 +209,7 @@ class TriggerSelectEntity(TriggerEntity, AbstractTemplateSelect):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -323,4 +323,3 @@ class TriggerSensorEntity(TriggerEntity, AbstractTemplateSensor):
rendered = self._rendered.get(CONF_STATE)
self._handle_state(rendered)
self.async_write_ha_state()

View File

@@ -281,6 +281,7 @@ class TriggerSwitchEntity(TriggerEntity, AbstractTemplateSwitch):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -303,30 +303,6 @@ class TemplateEntity(AbstractTemplateEntity):
self.add_template(option, attribute, on_update=_update_state)
def setup_template(
self,
option: str,
attribute: str,
validator: Callable[[Any], Any] | None = None,
on_update: Callable[[Any], None] | None = None,
):
"""Set up a template that manages any property or attribute of the entity.
Parameters
----------
option
The configuration key provided by ConfigFlow or the yaml option
attribute
The name of the attribute to link to. This attribute must exist
unless a custom on_update method is supplied.
validator:
Optional function that validates the rendered result.
on_update:
Called to store the template result rather than storing it
the supplied attribute. Passed the result of the validator.
"""
self.add_template(option, attribute, validator, on_update, True)
def add_template_attribute(
self,
attribute: str,

View File

@@ -59,33 +59,10 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
on_update: Callable[[Any], None] | None = None,
) -> None:
"""Set up a template that manages the main state of the entity."""
if self.add_template(option, attribute, validator, on_update):
self._to_render_simple.append(option)
self._parse_result.add(option)
def setup_template(
self,
option: str,
attribute: str,
validator: Callable[[Any], Any] | None = None,
on_update: Callable[[Any], None] | None = None,
) -> None:
"""Set up a template that manages any property or attribute of the entity.
Parameters
----------
option
The configuration key provided by ConfigFlow or the yaml option
attribute
The name of the attribute to link to. This attribute must exist
unless a custom on_update method is supplied.
validator:
Optional function that validates the rendered result.
on_update:
Called to store the template result rather than storing it
the supplied attribute. Passed the result of the validator.
"""
self.setup_state_template(option, attribute, validator, on_update)
if self._config.get(option):
self._to_render_simple.append(CONF_STATE)
self._parse_result.add(CONF_STATE)
self.add_template(option, attribute, validator, on_update)
@property
def referenced_blueprint(self) -> str | None:
@@ -126,35 +103,21 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
self._render_attributes(rendered, variables)
self._rendered = rendered
def _handle_rendered_results(self) -> bool:
def handle_rendered_result(self, key: str) -> bool:
"""Get a rendered result and return the value."""
# Handle any templates.
for option, entity_template in self._templates.items():
value = _SENTINEL
if (rendered := self._rendered.get(option)) is not None:
if (rendered := self._rendered.get(key)) is not None:
if (entity_template := self._templates.get(key)) is not None:
value = rendered
if entity_template.validator:
value = entity_template.validator(rendered)
if entity_template.validator:
value = entity_template.validator(rendered)
if entity_template.on_update:
entity_template.on_update(value)
else:
setattr(self, entity_template.attribute, value)
# Capture templates that did not render a result due to an exception and
# ensure the state object updates. _SENTINEL is used to differentiate
# templates that render None.
if value is _SENTINEL:
return True
if entity_template.on_update:
entity_template.on_update(value)
else:
setattr(self, entity_template.attribute, value)
return True
if len(self._rendered) > 0:
# In some cases, the entity may be state optimistic or
# attribute optimistic, in these scenarios the state needs
# to update.
return True
return False
@callback
@@ -173,35 +136,13 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module
else:
self._rendered_entity_variables = coordinator_variables
variables = self._template_variables(self._rendered_entity_variables)
self.async_set_context(self.coordinator.data["context"])
if self._render_availability_template(variables):
self._render_templates(variables)
write_state = False
# While transitioning platforms to the new framework, this
# if-statement is necessary for backward compatibility with existing
# trigger based platforms.
if self._templates:
# Handle any results that were rendered.
write_state = self._handle_rendered_results()
# Check availability after rendering the results because the state
# template could render the entity unavailable
if not self.available:
write_state = True
if write_state:
self.async_write_ha_state()
else:
self.async_write_ha_state()
self.async_set_context(self.coordinator.data["context"])
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator.
While transitioning platforms to the new framework, this
function is necessary for backward compatibility with existing
trigger based platforms.
"""
"""Handle updated data from the coordinator."""
self._process_data()
self.async_write_ha_state()

View File

@@ -438,6 +438,7 @@ class TriggerUpdateEntity(TriggerEntity, AbstractTemplateUpdate):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -489,6 +489,7 @@ class TriggerVacuumEntity(TriggerEntity, AbstractTemplateVacuum):
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -747,6 +747,7 @@ class TriggerWeatherEntity(TriggerEntity, AbstractTemplateWeather, RestoreEntity
self._process_data()
if not self.available:
self.async_write_ha_state()
return
write_ha_state = False

View File

@@ -2,13 +2,10 @@
from __future__ import annotations
import logging
from typing import Any, Self
from tuya_sharing import CustomerDevice
from homeassistant.components.sensor import SensorStateClass
from .type_information import (
BitmapTypeInformation,
BooleanTypeInformation,
@@ -20,15 +17,12 @@ from .type_information import (
TypeInformation,
)
_LOGGER = logging.getLogger(__name__)
class DeviceWrapper[T]:
"""Base device wrapper."""
native_unit: str | None = None
suggested_unit: str | None = None
state_class: SensorStateClass | None = None
max_value: float
min_value: float
@@ -36,13 +30,6 @@ class DeviceWrapper[T]:
options: list[str]
def initialize(self, device: CustomerDevice) -> None:
"""Initialize the wrapper with device data.
Called when the entity is added to Home Assistant.
Override in subclasses to perform initialization logic.
"""
def skip_update(
self,
device: CustomerDevice,
@@ -223,59 +210,6 @@ class DPCodeIntegerWrapper(DPCodeTypeInformationWrapper[IntegerTypeInformation])
)
class DPCodeDeltaIntegerWrapper(DPCodeIntegerWrapper):
"""Wrapper for integer values with delta report accumulation.
This wrapper handles sensors that report incremental (delta) values
instead of cumulative totals. It accumulates the delta values locally
to provide a running total.
"""
_accumulated_value: float = 0
_last_dp_timestamp: int | None = None
def __init__(self, dpcode: str, type_information: IntegerTypeInformation) -> None:
"""Init DPCodeDeltaIntegerWrapper."""
super().__init__(dpcode, type_information)
# Delta reports use TOTAL_INCREASING state class
self.state_class = SensorStateClass.TOTAL_INCREASING
def skip_update(
self,
device: CustomerDevice,
updated_status_properties: list[str] | None,
dp_timestamps: dict[str, int] | None,
) -> bool:
"""Override skip_update to process delta updates.
Processes delta accumulation before determining if update should be skipped.
"""
if (
super().skip_update(device, updated_status_properties, dp_timestamps)
or dp_timestamps is None
or (current_timestamp := dp_timestamps.get(self.dpcode)) is None
or current_timestamp == self._last_dp_timestamp
or (raw_value := super().read_device_status(device)) is None
):
return True
delta = float(raw_value)
self._accumulated_value += delta
_LOGGER.debug(
"Delta update for %s: +%s, total: %s",
self.dpcode,
delta,
self._accumulated_value,
)
self._last_dp_timestamp = current_timestamp
return False
def read_device_status(self, device: CustomerDevice) -> float | None:
"""Read device status, returning accumulated value for delta reports."""
return self._accumulated_value
class DPCodeRawWrapper(DPCodeTypeInformationWrapper[RawTypeInformation]):
"""Wrapper to extract information from a RAW/binary value."""

View File

@@ -40,7 +40,6 @@ from .const import (
from .entity import TuyaEntity
from .models import (
DeviceWrapper,
DPCodeDeltaIntegerWrapper,
DPCodeEnumWrapper,
DPCodeIntegerWrapper,
DPCodeJsonWrapper,
@@ -49,7 +48,7 @@ from .models import (
DPCodeWrapper,
)
from .raw_data_models import ElectricityData
from .type_information import EnumTypeInformation, IntegerTypeInformation
from .type_information import EnumTypeInformation
class _WindDirectionWrapper(DPCodeTypeInformationWrapper[EnumTypeInformation]):
@@ -1740,13 +1739,11 @@ def _get_dpcode_wrapper(
return wrapper
return None
# Check for integer type first, using delta wrapper only for sum report_type
if type_information := IntegerTypeInformation.find_dpcode(device, dpcode):
if type_information.report_type == "sum":
return DPCodeDeltaIntegerWrapper(type_information.dpcode, type_information)
return DPCodeIntegerWrapper(type_information.dpcode, type_information)
for cls in (DPCodeIntegerWrapper, DPCodeEnumWrapper):
if wrapper := cls.find_dpcode(device, dpcode):
return wrapper
return DPCodeEnumWrapper.find_dpcode(device, dpcode)
return None
async def async_setup_entry(
@@ -1801,8 +1798,6 @@ class TuyaSensorEntity(TuyaEntity, SensorEntity):
self._attr_native_unit_of_measurement = dpcode_wrapper.native_unit
if description.suggested_unit_of_measurement is None:
self._attr_suggested_unit_of_measurement = dpcode_wrapper.suggested_unit
if description.state_class is None:
self._attr_state_class = dpcode_wrapper.state_class
self._validate_device_class_unit()

View File

@@ -54,9 +54,7 @@ class TypeInformation[T]:
return raw_value
@classmethod
def _from_json(
cls, dpcode: str, type_data: str, *, report_type: str | None
) -> Self | None:
def _from_json(cls, dpcode: str, type_data: str) -> Self | None:
"""Load JSON string and return a TypeInformation object."""
return cls(dpcode=dpcode, type_data=type_data)
@@ -82,18 +80,13 @@ class TypeInformation[T]:
)
for dpcode in dpcodes:
report_type = (
sr.report_type if (sr := device.status_range.get(dpcode)) else None
)
for device_specs in lookup_tuple:
if (
(current_definition := device_specs.get(dpcode))
and parse_dptype(current_definition.type) is cls._DPTYPE
and (
type_information := cls._from_json(
dpcode=dpcode,
type_data=current_definition.values,
report_type=report_type,
dpcode=dpcode, type_data=current_definition.values
)
)
):
@@ -111,9 +104,7 @@ class BitmapTypeInformation(TypeInformation[int]):
label: list[str]
@classmethod
def _from_json(
cls, dpcode: str, type_data: str, *, report_type: str | None
) -> Self | None:
def _from_json(cls, dpcode: str, type_data: str) -> Self | None:
"""Load JSON string and return a BitmapTypeInformation object."""
if not (parsed := cast(dict[str, Any] | None, json_loads_object(type_data))):
return None
@@ -186,9 +177,7 @@ class EnumTypeInformation(TypeInformation[str]):
return raw_value
@classmethod
def _from_json(
cls, dpcode: str, type_data: str, *, report_type: str | None
) -> Self | None:
def _from_json(cls, dpcode: str, type_data: str) -> Self | None:
"""Load JSON string and return an EnumTypeInformation object."""
if not (parsed := json_loads_object(type_data)):
return None
@@ -210,7 +199,6 @@ class IntegerTypeInformation(TypeInformation[float]):
scale: int
step: int
unit: str | None = None
report_type: str | None
def scale_value(self, value: int) -> float:
"""Scale a value."""
@@ -246,9 +234,7 @@ class IntegerTypeInformation(TypeInformation[float]):
return raw_value / (10**self.scale)
@classmethod
def _from_json(
cls, dpcode: str, type_data: str, *, report_type: str | None
) -> Self | None:
def _from_json(cls, dpcode: str, type_data: str) -> Self | None:
"""Load JSON string and return an IntegerTypeInformation object."""
if not (parsed := cast(dict[str, Any] | None, json_loads_object(type_data))):
return None
@@ -261,7 +247,6 @@ class IntegerTypeInformation(TypeInformation[float]):
scale=int(parsed["scale"]),
step=int(parsed["step"]),
unit=parsed.get("unit"),
report_type=report_type,
)

View File

@@ -12,7 +12,7 @@
"documentation": "https://www.home-assistant.io/integrations/xbox",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["python-xbox==0.1.3"],
"requirements": ["python-xbox==0.1.2"],
"ssdp": [
{
"manufacturer": "Microsoft Corporation",

View File

@@ -25,5 +25,5 @@
"documentation": "https://www.home-assistant.io/integrations/xiaomi_ble",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["xiaomi-ble==1.6.0"]
"requirements": ["xiaomi-ble==1.5.0"]
}

View File

@@ -47,6 +47,14 @@ SERVER_SOFTWARE = (
f"aiohttp/{aiohttp.__version__} Python/{sys.version_info[0]}.{sys.version_info[1]}"
)
ENABLE_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < (
3,
13,
1,
) or sys.version_info < (3, 12, 7)
# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960
# which first appeared in Python 3.12.7 and 3.13.1
WARN_CLOSE_MSG = "closes the Home Assistant aiohttp session"
#
@@ -372,6 +380,7 @@ def _async_get_connector(
connector = HomeAssistantTCPConnector(
family=family,
enable_cleanup_closed=ENABLE_CLEANUP_CLOSED,
ssl=ssl_context,
limit=MAXIMUM_CONNECTIONS,
limit_per_host=MAXIMUM_CONNECTIONS_PER_HOST,
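
Note: the `ENABLE_CLEANUP_CLOSED` guard above only enables the workaround on interpreters that predate the CPython fix that first shipped in 3.12.7 and 3.13.1; a quick standalone check:

```python
# Evaluate the version guard from the diff for a few example interpreter versions.
def needs_cleanup_closed(version_info: tuple[int, int, int]) -> bool:
    return (3, 13, 0) <= version_info < (3, 13, 1) or version_info < (3, 12, 7)


for version in [(3, 12, 6), (3, 12, 7), (3, 13, 0), (3, 13, 1)]:
    print(version, needs_cleanup_closed(version))
# (3, 12, 6) True / (3, 12, 7) False / (3, 13, 0) True / (3, 13, 1) False
```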

View File

@@ -104,7 +104,6 @@ _AMBIENT_IDEAL_GAS_MOLAR_VOLUME = ( # m3⋅mol⁻¹
# Molar masses in g⋅mol⁻¹
_CARBON_MONOXIDE_MOLAR_MASS = 28.01
_NITROGEN_DIOXIDE_MOLAR_MASS = 46.0055
_NITROGEN_MONOXIDE_MOLAR_MASS = 30.0061
_OZONE_MOLAR_MASS = 48.00
_SULPHUR_DIOXIDE_MOLAR_MASS = 64.066
@@ -503,22 +502,6 @@ class NitrogenDioxideConcentrationConverter(BaseUnitConverter):
}
class NitrogenMonoxideConcentrationConverter(BaseUnitConverter):
"""Convert nitrogen monoxide ratio to mass per volume."""
UNIT_CLASS = "nitrogen_monoxide"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_NITROGEN_MONOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
class OzoneConcentrationConverter(BaseUnitConverter):
"""Convert ozone ratio to mass per volume."""

requirements_all.txt (generated)
View File

@@ -1296,7 +1296,7 @@ influxdb==5.3.1
inkbird-ble==1.1.1
# homeassistant.components.insteon
insteon-frontend-home-assistant==0.6.1
insteon-frontend-home-assistant==0.6.0
# homeassistant.components.intellifire
intellifire4py==4.2.1
@@ -1526,7 +1526,7 @@ mozart-api==5.3.1.108.0
mullvad-api==1.0.0
# homeassistant.components.music_assistant
music-assistant-client==1.3.3
music-assistant-client==1.3.2
# homeassistant.components.tts
mutagen==1.47.0
@@ -1683,7 +1683,7 @@ openwrt-luci-rpc==1.1.17
openwrt-ubus-rpc==0.0.2
# homeassistant.components.opower
opower==0.16.5
opower==0.16.4
# homeassistant.components.oralb
oralb-ble==1.0.2
@@ -2607,7 +2607,7 @@ python-telegram-bot[socks]==22.1
python-vlc==3.0.18122
# homeassistant.components.xbox
python-xbox==0.1.3
python-xbox==0.1.2
# homeassistant.components.egardia
pythonegardia==1.0.52
@@ -3215,7 +3215,7 @@ wsdot==0.0.1
wyoming==1.7.2
# homeassistant.components.xiaomi_ble
xiaomi-ble==1.6.0
xiaomi-ble==1.5.0
# homeassistant.components.knx
xknx==3.14.0

View File

@@ -1142,7 +1142,7 @@ influxdb==5.3.1
inkbird-ble==1.1.1
# homeassistant.components.insteon
insteon-frontend-home-assistant==0.6.1
insteon-frontend-home-assistant==0.6.0
# homeassistant.components.intellifire
intellifire4py==4.2.1
@@ -1333,7 +1333,7 @@ mozart-api==5.3.1.108.0
mullvad-api==1.0.0
# homeassistant.components.music_assistant
music-assistant-client==1.3.3
music-assistant-client==1.3.2
# homeassistant.components.tts
mutagen==1.47.0
@@ -1457,7 +1457,7 @@ openrgb-python==0.3.6
openwebifpy==4.3.1
# homeassistant.components.opower
opower==0.16.5
opower==0.16.4
# homeassistant.components.oralb
oralb-ble==1.0.2
@@ -2194,7 +2194,7 @@ python-technove==2.0.0
python-telegram-bot[socks]==22.1
# homeassistant.components.xbox
python-xbox==0.1.3
python-xbox==0.1.2
# homeassistant.components.uptime_kuma
pythonkuma==0.3.2
@@ -2691,7 +2691,7 @@ wsdot==0.0.1
wyoming==1.7.2
# homeassistant.components.xiaomi_ble
xiaomi-ble==1.6.0
xiaomi-ble==1.5.0
# homeassistant.components.knx
xknx==3.14.0

View File

@@ -41,7 +41,7 @@ PACKAGE_CHECK_VERSION_RANGE = {
"pymodbus": "Custom",
"pytz": "CalVer",
"requests": "SemVer",
"typing-extensions": "SemVer",
"typing_extensions": "SemVer",
"urllib3": "SemVer",
"yarl": "SemVer",
"zeroconf": "SemVer",

View File

@@ -376,14 +376,14 @@
'object_id_base': 'Nitrogen monoxide',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.NITROGEN_MONOXIDE: 'nitrogen_monoxide'>,
'original_device_class': None,
'original_icon': None,
'original_name': 'Nitrogen monoxide',
'platform': 'google_air_quality',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'nitrogen_monoxide',
'unique_id': 'no_10.1_20.1',
'unit_of_measurement': 'ppb',
})
@@ -392,7 +392,6 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'attribution': 'Data provided by Google Air Quality',
'device_class': 'nitrogen_monoxide',
'friendly_name': 'Home Nitrogen monoxide',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'ppb',

View File

@@ -1,9 +1,7 @@
"""Test Hikvision integration setup and unload."""
from unittest.mock import MagicMock
from xml.etree.ElementTree import ParseError
import pytest
import requests
from homeassistant.config_entries import ConfigEntryState
@@ -104,69 +102,3 @@ async def test_setup_entry_nvr_fetches_events(
assert mock_config_entry.state is ConfigEntryState.LOADED
mock_hik_nvr.return_value.get_event_triggers.assert_called_once()
mock_hik_nvr.return_value.inject_events.assert_called_once()
async def test_setup_entry_nvr_event_fetch_request_error(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_hik_nvr: MagicMock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test setup continues when NVR event fetch fails with request error."""
mock_hik_nvr.return_value.get_event_triggers.side_effect = (
requests.exceptions.RequestException("Connection error")
)
await setup_integration(hass, mock_config_entry)
assert mock_config_entry.state is ConfigEntryState.LOADED
mock_hik_nvr.return_value.get_event_triggers.assert_called_once()
mock_hik_nvr.return_value.inject_events.assert_not_called()
assert f"Unable to fetch event triggers from {TEST_HOST}" in caplog.text
async def test_setup_entry_nvr_event_fetch_parse_error(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_hik_nvr: MagicMock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test setup continues when NVR event fetch fails with parse error."""
mock_hik_nvr.return_value.get_event_triggers.side_effect = ParseError("Invalid XML")
await setup_integration(hass, mock_config_entry)
assert mock_config_entry.state is ConfigEntryState.LOADED
mock_hik_nvr.return_value.get_event_triggers.assert_called_once()
mock_hik_nvr.return_value.inject_events.assert_not_called()
assert f"Unable to fetch event triggers from {TEST_HOST}" in caplog.text
async def test_setup_entry_nvr_no_events_returned(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_hik_nvr: MagicMock,
) -> None:
"""Test setup continues when NVR returns no events."""
mock_hik_nvr.return_value.get_event_triggers.return_value = None
await setup_integration(hass, mock_config_entry)
assert mock_config_entry.state is ConfigEntryState.LOADED
mock_hik_nvr.return_value.get_event_triggers.assert_called_once()
mock_hik_nvr.return_value.inject_events.assert_not_called()
async def test_setup_entry_nvr_empty_events_returned(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_hik_nvr: MagicMock,
) -> None:
"""Test setup continues when NVR returns empty events."""
mock_hik_nvr.return_value.get_event_triggers.return_value = {}
await setup_integration(hass, mock_config_entry)
assert mock_config_entry.state is ConfigEntryState.LOADED
mock_hik_nvr.return_value.get_event_triggers.assert_called_once()
mock_hik_nvr.return_value.inject_events.assert_not_called()

View File

@@ -683,18 +683,26 @@ async def test_ssl_issue_urls_configured(
"hassio",
"http_config",
"expected_serverhost",
"expected_warning_count",
"expected_issues",
),
[
(False, {}, ["0.0.0.0", "::"], set()),
(False, {"server_host": "0.0.0.0"}, ["0.0.0.0"], set()),
(True, {}, ["0.0.0.0", "::"], set()),
(False, {}, ["0.0.0.0", "::"], 0, set()),
(
False,
{"server_host": "0.0.0.0"},
["0.0.0.0"],
1,
{("http", "server_host_deprecated")},
),
(True, {}, ["0.0.0.0", "::"], 0, set()),
(
True,
{"server_host": "0.0.0.0"},
[
"0.0.0.0",
],
1,
{("http", "server_host_deprecated_hassio")},
),
],
@@ -705,6 +713,7 @@ async def test_server_host(
issue_registry: ir.IssueRegistry,
http_config: dict,
expected_serverhost: list,
expected_warning_count: int,
expected_issues: set[tuple[str, str]],
caplog: pytest.LogCaptureFixture,
) -> None:
@@ -734,4 +743,11 @@ async def test_server_host(
reuse_port=None,
)
assert (
caplog.text.count(
"The 'server_host' option is deprecated, please remove it from your configuration"
)
== expected_warning_count
)
assert set(issue_registry.issues) == expected_issues

View File

@@ -1,16 +1,17 @@
"""Test device tracker conditions."""
"""Test lock conditions."""
from typing import Any
import pytest
from homeassistant.const import STATE_HOME, STATE_NOT_HOME
from homeassistant.components.lock.const import LockState
from homeassistant.core import HomeAssistant
from tests.components import (
ConditionStateDescription,
assert_condition_gated_by_labs_flag,
create_target_condition,
other_states,
parametrize_condition_states_all,
parametrize_condition_states_any,
parametrize_target_entities,
@@ -20,48 +21,60 @@ from tests.components import (
@pytest.fixture
async def target_device_trackers(hass: HomeAssistant) -> list[str]:
"""Create multiple device tracker entities associated with different targets."""
return (await target_entities(hass, "device_tracker"))["included"]
async def target_locks(hass: HomeAssistant) -> list[str]:
"""Create multiple lock entities associated with different targets."""
return (await target_entities(hass, "lock"))["included"]
@pytest.mark.parametrize(
"condition",
[
"device_tracker.is_home",
"device_tracker.is_not_home",
"lock.is_jammed",
"lock.is_locked",
"lock.is_open",
"lock.is_unlocked",
],
)
async def test_device_tracker_conditions_gated_by_labs_flag(
async def test_lock_conditions_gated_by_labs_flag(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, condition: str
) -> None:
"""Test the device tracker conditions are gated by the labs flag."""
"""Test the lock conditions are gated by the labs flag."""
await assert_condition_gated_by_labs_flag(hass, caplog, condition)


@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
("condition_target_config", "entity_id", "entities_in_target"),
parametrize_target_entities("device_tracker"),
parametrize_target_entities("lock"),
)
@pytest.mark.parametrize(
("condition", "condition_options", "states"),
[
*parametrize_condition_states_any(
condition="device_tracker.is_home",
target_states=[STATE_HOME],
other_states=[STATE_NOT_HOME],
condition="lock.is_jammed",
target_states=[LockState.JAMMED],
other_states=other_states(LockState.JAMMED),
),
*parametrize_condition_states_any(
condition="device_tracker.is_not_home",
target_states=[STATE_NOT_HOME],
other_states=[STATE_HOME],
condition="lock.is_locked",
target_states=[LockState.LOCKED],
other_states=other_states(LockState.LOCKED),
),
*parametrize_condition_states_any(
condition="lock.is_open",
target_states=[LockState.OPEN],
other_states=other_states(LockState.OPEN),
),
*parametrize_condition_states_any(
condition="lock.is_unlocked",
target_states=[LockState.UNLOCKED],
other_states=other_states(LockState.UNLOCKED),
),
],
)
async def test_device_tracker_state_condition_behavior_any(
async def test_lock_state_condition_behavior_any(
hass: HomeAssistant,
target_device_trackers: list[str],
target_locks: list[str],
condition_target_config: dict,
entity_id: str,
entities_in_target: int,
@@ -69,11 +82,11 @@ async def test_device_tracker_state_condition_behavior_any(
condition_options: dict[str, Any],
states: list[ConditionStateDescription],
) -> None:
"""Test the device tracker state condition with the 'any' behavior."""
other_entity_ids = set(target_device_trackers) - {entity_id}
"""Test the lock state condition with the 'any' behavior."""
other_entity_ids = set(target_locks) - {entity_id}
# Set all device trackers, including the tested one, to the initial state
for eid in target_device_trackers:
# Set all locks, including the tested lock, to the initial state
for eid in target_locks:
set_or_remove_state(hass, eid, states[0]["included"])
await hass.async_block_till_done()
@@ -90,7 +103,7 @@ async def test_device_tracker_state_condition_behavior_any(
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]
# Check if changing other device trackers also passes the condition
# Check if changing other locks also passes the condition
for other_entity_id in other_entity_ids:
set_or_remove_state(hass, other_entity_id, included_state)
await hass.async_block_till_done()
@@ -100,26 +113,36 @@ async def test_device_tracker_state_condition_behavior_any(
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
("condition_target_config", "entity_id", "entities_in_target"),
parametrize_target_entities("device_tracker"),
parametrize_target_entities("lock"),
)
@pytest.mark.parametrize(
("condition", "condition_options", "states"),
[
*parametrize_condition_states_all(
condition="device_tracker.is_home",
target_states=[STATE_HOME],
other_states=[STATE_NOT_HOME],
condition="lock.is_jammed",
target_states=[LockState.JAMMED],
other_states=other_states(LockState.JAMMED),
),
*parametrize_condition_states_all(
condition="device_tracker.is_not_home",
target_states=[STATE_NOT_HOME],
other_states=[STATE_HOME],
condition="lock.is_locked",
target_states=[LockState.LOCKED],
other_states=other_states(LockState.LOCKED),
),
*parametrize_condition_states_all(
condition="lock.is_open",
target_states=[LockState.OPEN],
other_states=other_states(LockState.OPEN),
),
*parametrize_condition_states_all(
condition="lock.is_unlocked",
target_states=[LockState.UNLOCKED],
other_states=other_states(LockState.UNLOCKED),
),
],
)
async def test_device_tracker_state_condition_behavior_all(
async def test_lock_state_condition_behavior_all(
hass: HomeAssistant,
target_device_trackers: list[str],
target_locks: list[str],
condition_target_config: dict,
entity_id: str,
entities_in_target: int,
@@ -127,11 +150,11 @@ async def test_device_tracker_state_condition_behavior_all(
condition_options: dict[str, Any],
states: list[ConditionStateDescription],
) -> None:
"""Test the device tracker state condition with the 'all' behavior."""
other_entity_ids = set(target_device_trackers) - {entity_id}
"""Test the lock state condition with the 'all' behavior."""
other_entity_ids = set(target_locks) - {entity_id}
# Set all device trackers, including the tested one, to the initial state
for eid in target_device_trackers:
# Set all locks, including the tested lock, to the initial state
for eid in target_locks:
set_or_remove_state(hass, eid, states[0]["included"])
await hass.async_block_till_done()

View File

@@ -538,9 +538,8 @@ async def test_media_player_play_announcement_action(
"players/cmd/play_announcement",
player_id=mass_player_id,
url="http://blah.com/announcement.mp3",
pre_announce=True,
use_pre_announce=True,
volume_level=50,
pre_announce_url=None,
)

View File

@@ -3107,6 +3107,7 @@ def test_device_class_converters_are_complete() -> None:
SensorDeviceClass.IRRADIANCE,
SensorDeviceClass.MOISTURE,
SensorDeviceClass.MONETARY,
SensorDeviceClass.NITROGEN_MONOXIDE,
SensorDeviceClass.NITROUS_OXIDE,
SensorDeviceClass.PH,
SensorDeviceClass.PM1,

View File

@@ -29,7 +29,6 @@ class MockDeviceListener(DeviceListener):
hass: HomeAssistant,
device: CustomerDevice,
updated_status_properties: dict[str, Any] | None = None,
dp_timestamps: dict[str, int] | None = None,
) -> None:
"""Mock update device method."""
property_list: list[str] = []
@@ -41,7 +40,7 @@ class MockDeviceListener(DeviceListener):
)
device.status[key] = value
property_list.append(key)
self.update_device(device, property_list, dp_timestamps)
self.update_device(device, property_list)
await hass.async_block_till_done()

View File

@@ -8861,7 +8861,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
'state': '0.084',
})
# ---
# name: test_platform_setup_and_discovery[sensor.ha_socket_delta_test_voltage-entry]

View File

@@ -10,7 +10,6 @@ import pytest
from syrupy.assertion import SnapshotAssertion
from tuya_sharing import CustomerDevice, Manager
from homeassistant.components.sensor import SensorStateClass
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
@@ -82,106 +81,3 @@ async def test_selective_state_update(
expected_state=expected_state,
last_reported=last_reported,
)


@patch("homeassistant.components.tuya.PLATFORMS", [Platform.SENSOR])
@pytest.mark.parametrize("mock_device_code", ["cz_guitoc9iylae4axs"])
async def test_delta_report_sensor(
hass: HomeAssistant,
mock_manager: Manager,
mock_config_entry: MockConfigEntry,
mock_device: CustomerDevice,
mock_listener: MockDeviceListener,
) -> None:
"""Test delta report sensor behavior."""
await initialize_entry(hass, mock_manager, mock_config_entry, mock_device)
entity_id = "sensor.ha_socket_delta_test_total_energy"
timestamp = 1000
# Delta sensors start from zero and accumulate values
state = hass.states.get(entity_id)
assert state is not None
assert state.state == "0"
assert state.attributes["state_class"] == SensorStateClass.TOTAL_INCREASING
# Send delta update
await mock_listener.async_send_device_update(
hass,
mock_device,
{"add_ele": 200},
{"add_ele": timestamp},
)
state = hass.states.get(entity_id)
assert state is not None
assert float(state.state) == pytest.approx(0.2)
# Send delta update (multiple dpcode)
timestamp += 100
await mock_listener.async_send_device_update(
hass,
mock_device,
{"add_ele": 300, "switch_1": True},
{"add_ele": timestamp, "switch_1": timestamp},
)
state = hass.states.get(entity_id)
assert state is not None
assert float(state.state) == pytest.approx(0.5)
# Send delta update (timestamp not incremented)
await mock_listener.async_send_device_update(
hass,
mock_device,
{"add_ele": 500},
{"add_ele": timestamp}, # same timestamp
)
state = hass.states.get(entity_id)
assert state is not None
assert float(state.state) == pytest.approx(0.5) # unchanged
# Send delta update (unrelated dpcode)
await mock_listener.async_send_device_update(
hass,
mock_device,
{"switch_1": False},
{"switch_1": timestamp + 100},
)
state = hass.states.get(entity_id)
assert state is not None
assert float(state.state) == pytest.approx(0.5) # unchanged
# Send delta update
timestamp += 100
await mock_listener.async_send_device_update(
hass,
mock_device,
{"add_ele": 100},
{"add_ele": timestamp},
)
state = hass.states.get(entity_id)
assert state is not None
assert float(state.state) == pytest.approx(0.6)
# Send delta update (None value)
timestamp += 100
mock_device.status["add_ele"] = None
await mock_listener.async_send_device_update(
hass,
mock_device,
{"add_ele": None},
{"add_ele": timestamp},
)
state = hass.states.get(entity_id)
assert state is not None
assert float(state.state) == pytest.approx(0.6) # unchanged
# Send delta update (no timestamp - skipped)
mock_device.status["add_ele"] = 200
await mock_listener.async_send_device_update(
hass,
mock_device,
{"add_ele": 200},
None,
)
state = hass.states.get(entity_id)
assert state is not None
assert float(state.state) == pytest.approx(0.6) # unchanged

View File

@@ -57,7 +57,6 @@ from homeassistant.util.unit_conversion import (
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
@@ -108,7 +107,6 @@ _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = {
VolumeConverter,
VolumeFlowRateConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
SulphurDioxideConcentrationConverter,
)
}
@@ -171,11 +169,6 @@ _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, flo
CONCENTRATION_PARTS_PER_BILLION,
1.912503,
),
NitrogenMonoxideConcentrationConverter: (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
1.247389,
),
OzoneConcentrationConverter: (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
@@ -414,20 +407,6 @@ _CONVERTED_VALUE: dict[
CONCENTRATION_PARTS_PER_BILLION,
),
],
NitrogenMonoxideConcentrationConverter: [
(
1,
CONCENTRATION_PARTS_PER_BILLION,
1.247389,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
),
(
120,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
96.200906,
CONCENTRATION_PARTS_PER_BILLION,
),
],
ConductivityConverter: [
(
5,