Compare commits

...

25 Commits

Author SHA1 Message Date
Erik
7795650a0d Improve docstrings in condition tests 2026-01-22 13:35:36 +01:00
Jeremiah
32cd649fe4 Bump xiaomi-ble to 1.6.0 (#161421)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
2026-01-22 12:50:37 +01:00
dependabot[bot]
69dc711466 Bump actions/setup-python from 6.1.0 to 6.2.0 (#161417) 2026-01-22 12:07:18 +01:00
Thomas55555
78212245dd Add ppb as a valid UOM for sensor/number NO device class (#161379) 2026-01-22 11:34:29 +01:00
Joost Lekkerkerker
5bbc39bd88 Add integration_type hub to screenlogic (#161324) 2026-01-22 07:56:26 +01:00
Robert Resch
6b14eb7ad1 Migrate config entries to string unique id (#161370) 2026-01-21 23:36:21 -05:00
J. Nick Koston
83a53dea94 Fix SSL context mutation by httpx/httpcore with ALPN protocol bucketing (#161330) 2026-01-21 16:53:38 -10:00
Joost Lekkerkerker
4fb89e68a7 Add integration_type device to sanix (#161322) 2026-01-21 23:18:32 +01:00
Glenn de Haan
5202ddf095 Bump hdfury to 1.4.2 (#161401) 2026-01-21 23:06:06 +01:00
Marc Mueller
f7d7a4502e Update ruff to 0.14.13 (#161399) 2026-01-21 22:43:26 +01:00
Petro31
c7417d77b5 Update template select test framework (#161389) 2026-01-21 22:31:00 +01:00
Petro31
22018f1f80 Update template number tests to new framework (#161395) 2026-01-21 22:30:13 +01:00
Raphael Hehl
22c6704d81 Fix detection of multiple smart object types in single event (#161189)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-21 22:22:34 +01:00
Raphael Hehl
0552934b3c Bump uiprotect to 10.0.1 (#161397)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-01-21 22:20:33 +01:00
Joost Lekkerkerker
bbe1d28e88 Refactor GitHub tests to patch the library instead (#160568) 2026-01-21 22:09:56 +01:00
Robert Resch
b700a27c8f Enable apple tv on Python 3.14 (#161396) 2026-01-21 21:56:51 +01:00
Joost Lekkerkerker
0566a668a9 Add translation for add entry to RDW (#161329) 2026-01-21 21:28:27 +01:00
Marc Mueller
94f636bc2d Update pyatv to 0.17.0 (#161394) 2026-01-21 21:22:26 +01:00
Manu
a6e7546142 Add support for sequence ID to publish action in ntfy integration (#161342) 2026-01-21 17:41:46 +00:00
Thomas55555
493319894b Use device_class for O3 in Google Air Quality (#161380) 2026-01-21 17:34:46 +01:00
Erik Montnemery
987396722b Adjust entity condition strings (#161055) 2026-01-21 16:56:47 +01:00
epenet
4f52b0363d Reorder unit conversion classes alphabetically (#161364) 2026-01-21 15:53:43 +00:00
Daniel Hjelseth Høyer
52e18ed6f6 Simplify tibber config (#160903) 2026-01-21 15:42:25 +01:00
Abílio Costa
4180175fd3 Improve automation variable name (#161340) 2026-01-21 14:27:18 +00:00
Maciej Bieniek
e39ee8cae7 Bump imgw_pib to 2.0.1 (#161376) 2026-01-21 15:26:29 +01:00
108 changed files with 2117 additions and 1798 deletions

View File

@@ -33,7 +33,7 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -122,7 +122,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -477,7 +477,7 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

View File

@@ -297,7 +297,7 @@ jobs:
- &setup-python-matrix
name: Set up Python ${{ matrix.python-version }}
id: python
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: &actions-setup-python actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true

View File

@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

View File

@@ -35,7 +35,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.13.0
rev: v0.14.13
hooks:
- id: ruff-check
args:

View File

@@ -14,7 +14,7 @@
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
}
},
"name": "If an alarm is armed"
"name": "Alarm is armed"
},
"is_armed_away": {
"description": "Tests if one or more alarms are armed in away mode.",
@@ -24,7 +24,7 @@
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
}
},
"name": "If an alarm is armed away"
"name": "Alarm is armed away"
},
"is_armed_home": {
"description": "Tests if one or more alarms are armed in home mode.",
@@ -34,7 +34,7 @@
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
}
},
"name": "If an alarm is armed home"
"name": "Alarm is armed home"
},
"is_armed_night": {
"description": "Tests if one or more alarms are armed in night mode.",
@@ -44,7 +44,7 @@
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
}
},
"name": "If an alarm is armed night"
"name": "Alarm is armed night"
},
"is_armed_vacation": {
"description": "Tests if one or more alarms are armed in vacation mode.",
@@ -54,7 +54,7 @@
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
}
},
"name": "If an alarm is armed vacation"
"name": "Alarm is armed vacation"
},
"is_disarmed": {
"description": "Tests if one or more alarms are disarmed.",
@@ -64,7 +64,7 @@
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
}
},
"name": "If an alarm is disarmed"
"name": "Alarm is disarmed"
},
"is_triggered": {
"description": "Tests if one or more alarms are triggered.",
@@ -74,7 +74,7 @@
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
}
},
"name": "If an alarm is triggered"
"name": "Alarm is triggered"
}
},
"device_automation": {

View File

@@ -5,9 +5,14 @@ from __future__ import annotations
import asyncio
import logging
from random import randrange
import sys
from typing import Any, cast
from pyatv import connect, exceptions, scan
from pyatv.conf import AppleTV
from pyatv.const import DeviceModel, Protocol
from pyatv.convert import model_str
from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
from homeassistant.components import zeroconf
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -24,11 +29,7 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
)
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -42,18 +43,6 @@ from .const import (
SIGNAL_DISCONNECTED,
)
if sys.version_info < (3, 14):
from pyatv import connect, exceptions, scan
from pyatv.conf import AppleTV
from pyatv.const import DeviceModel, Protocol
from pyatv.convert import model_str
from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
else:
class DeviceListener:
"""Dummy class."""
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME_TV = "Apple TV"
@@ -64,30 +53,25 @@ BACKOFF_TIME_UPPER_LIMIT = 300 # Five minutes
PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
if sys.version_info < (3, 14):
AUTH_EXCEPTIONS = (
exceptions.AuthenticationError,
exceptions.InvalidCredentialsError,
exceptions.NoCredentialsError,
)
CONNECTION_TIMEOUT_EXCEPTIONS = (
OSError,
asyncio.CancelledError,
TimeoutError,
exceptions.ConnectionLostError,
exceptions.ConnectionFailedError,
)
DEVICE_EXCEPTIONS = (
exceptions.ProtocolError,
exceptions.NoServiceError,
exceptions.PairingError,
exceptions.BackOffError,
exceptions.DeviceIdMissingError,
)
else:
AUTH_EXCEPTIONS = ()
CONNECTION_TIMEOUT_EXCEPTIONS = ()
DEVICE_EXCEPTIONS = ()
AUTH_EXCEPTIONS = (
exceptions.AuthenticationError,
exceptions.InvalidCredentialsError,
exceptions.NoCredentialsError,
)
CONNECTION_TIMEOUT_EXCEPTIONS = (
OSError,
asyncio.CancelledError,
TimeoutError,
exceptions.ConnectionLostError,
exceptions.ConnectionFailedError,
)
DEVICE_EXCEPTIONS = (
exceptions.ProtocolError,
exceptions.NoServiceError,
exceptions.PairingError,
exceptions.BackOffError,
exceptions.DeviceIdMissingError,
)
type AppleTvConfigEntry = ConfigEntry[AppleTVManager]
@@ -95,10 +79,6 @@ type AppleTvConfigEntry = ConfigEntry[AppleTVManager]
async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool:
"""Set up a config entry for Apple TV."""
if sys.version_info >= (3, 14):
raise HomeAssistantError(
"Apple TV is not supported on Python 3.14. Please use Python 3.13."
)
manager = AppleTVManager(hass, entry)
if manager.is_on:

View File

@@ -8,7 +8,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyatv", "srptools"],
"requirements": ["pyatv==0.16.1;python_version<'3.14'"],
"requirements": ["pyatv==0.17.0"],
"zeroconf": [
"_mediaremotetv._tcp.local.",
"_companion-link._tcp.local.",

View File

@@ -239,6 +239,15 @@ class AppleTvMediaPlayer(
"""
self.async_write_ha_state()
@callback
def volume_device_update(
self, output_device: OutputDevice, old_level: float, new_level: float
) -> None:
"""Output device volume was updated.
This is a callback function from pyatv.interface.AudioListener.
"""
@callback
def outputdevices_update(
self, old_devices: list[OutputDevice], new_devices: list[OutputDevice]

View File

@@ -2,14 +2,35 @@
from __future__ import annotations
import logging
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import ArveConfigEntry, ArveCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_migrate_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool:
"""Migrate entry."""
_LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
if entry.version == 1:
# 1 -> 1.2: Unique ID from integer to string
if entry.minor_version == 1:
minor_version = 2
hass.config_entries.async_update_entry(
entry, unique_id=str(entry.unique_id), minor_version=minor_version
)
_LOGGER.debug("Migration successful")
return True
async def async_setup_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool:
"""Set up Arve from a config entry."""

View File

@@ -19,6 +19,9 @@ _LOGGER = logging.getLogger(__name__)
class ArveConfigFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Arve."""
VERSION = 1
MINOR_VERSION = 2
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -35,7 +38,7 @@ class ArveConfigFlowHandler(ConfigFlow, domain=DOMAIN):
except ArveConnectionError:
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(customer.customerId)
await self.async_set_unique_id(str(customer.customerId))
self._abort_if_unique_id_configured()
return self.async_create_entry(
title="Arve",

View File

@@ -14,7 +14,7 @@
"name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
}
},
"name": "If a satellite is idle"
"name": "Satellite is idle"
},
"is_listening": {
"description": "Tests if one or more Assist satellites are listening.",
@@ -24,7 +24,7 @@
"name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
}
},
"name": "If a satellite is listening"
"name": "Satellite is listening"
},
"is_processing": {
"description": "Tests if one or more Assist satellites are processing.",
@@ -34,7 +34,7 @@
"name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
}
},
"name": "If a satellite is processing"
"name": "Satellite is processing"
},
"is_responding": {
"description": "Tests if one or more Assist satellites are responding.",
@@ -44,7 +44,7 @@
"name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
}
},
"name": "If a satellite is responding"
"name": "Satellite is responding"
}
},
"entity_component": {

View File

@@ -56,7 +56,7 @@ from homeassistant.core import (
valid_entity_id,
)
from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers import condition as condition_helper, config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.issue_registry import (
@@ -554,7 +554,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
automation_id: str | None,
name: str,
trigger_config: list[ConfigType],
cond_func: IfAction | None,
condition: IfAction | None,
action_script: Script,
initial_state: bool | None,
variables: ScriptVariables | None,
@@ -567,7 +567,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
self._attr_name = name
self._trigger_config = trigger_config
self._async_detach_triggers: CALLBACK_TYPE | None = None
self._cond_func = cond_func
self._condition = condition
self.action_script = action_script
self.action_script.change_listener = self.async_write_ha_state
self._initial_state = initial_state
@@ -602,9 +602,11 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced labels."""
referenced = self.action_script.referenced_labels
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_targets(conf, ATTR_LABEL_ID)
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_targets(
conf, ATTR_LABEL_ID
)
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
@@ -615,9 +617,11 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced floors."""
referenced = self.action_script.referenced_floors
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_targets(conf, ATTR_FLOOR_ID)
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_targets(
conf, ATTR_FLOOR_ID
)
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
@@ -628,9 +632,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced areas."""
referenced = self.action_script.referenced_areas
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_targets(conf, ATTR_AREA_ID)
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_targets(conf, ATTR_AREA_ID)
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
@@ -648,9 +652,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced devices."""
referenced = self.action_script.referenced_devices
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_devices(conf)
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_devices(conf)
for conf in self._trigger_config:
referenced |= set(_trigger_extract_devices(conf))
@@ -662,9 +666,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced entities."""
referenced = self.action_script.referenced_entities
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_entities(conf)
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_entities(conf)
for conf in self._trigger_config:
for entity_id in _trigger_extract_entities(conf):
@@ -784,8 +788,8 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
if (
not skip_condition
and self._cond_func is not None
and not self._cond_func(variables)
and self._condition is not None
and not self._condition(variables)
):
self._logger.debug(
"Conditions not met, aborting automation. Condition summary: %s",
@@ -1047,12 +1051,12 @@ async def _create_automation_entities(
)
if CONF_CONDITIONS in config_block:
cond_func = await _async_process_if(hass, name, config_block)
condition = await _async_process_if(hass, name, config_block)
if cond_func is None:
if condition is None:
continue
else:
cond_func = None
condition = None
# Add trigger variables to variables
variables = None
@@ -1070,7 +1074,7 @@ async def _create_automation_entities(
automation_id,
name,
config_block[CONF_TRIGGERS],
cond_func,
condition,
action_script,
initial_state,
variables,
@@ -1212,7 +1216,7 @@ async def _async_process_if(
if_configs = config[CONF_CONDITIONS]
try:
if_action = await condition.async_conditions_from_config(
if_action = await condition_helper.async_conditions_from_config(
hass, if_configs, LOGGER, name
)
except HomeAssistantError as ex:

View File

@@ -14,7 +14,7 @@
"name": "[%key:component::fan::common::condition_behavior_name%]"
}
},
"name": "If a fan is off"
"name": "Fan is off"
},
"is_on": {
"description": "Tests if one or more fans are on.",
@@ -24,7 +24,7 @@
"name": "[%key:component::fan::common::condition_behavior_name%]"
}
},
"name": "If a fan is on"
"name": "Fan is on"
}
},
"device_automation": {

View File

@@ -12,9 +12,6 @@
},
"non_methane_hydrocarbons": {
"default": "mdi:molecule"
},
"ozone": {
"default": "mdi:molecule"
}
}
}

View File

@@ -154,8 +154,8 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
),
AirQualitySensorEntityDescription(
key="o3",
translation_key="ozone",
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.OZONE,
native_unit_of_measurement_fn=lambda x: x.pollutants.o3.concentration.units,
exists_fn=lambda x: "o3" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.o3.concentration.value,

View File

@@ -211,9 +211,6 @@
"non_methane_hydrocarbons": {
"name": "Non-methane hydrocarbons"
},
"ozone": {
"name": "[%key:component::sensor::entity_component::ozone::name%]"
},
"uaqi": {
"name": "Universal Air Quality Index"
},

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["hdfury==1.3.1"]
"requirements": ["hdfury==1.4.2"]
}

View File

@@ -28,6 +28,7 @@ from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
from .const import DOMAIN, UPDATE_INTERVAL
from .entity import AqualinkEntity
@@ -66,7 +67,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AqualinkConfigEntry) ->
username = entry.data[CONF_USERNAME]
password = entry.data[CONF_PASSWORD]
aqualink = AqualinkClient(username, password, httpx_client=get_async_client(hass))
aqualink = AqualinkClient(
username,
password,
httpx_client=get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2),
)
try:
await aqualink.login()
except AqualinkServiceException as login_exception:

View File

@@ -15,6 +15,7 @@ import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
from .const import DOMAIN
@@ -36,7 +37,11 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN):
try:
async with AqualinkClient(
username, password, httpx_client=get_async_client(self.hass)
username,
password,
httpx_client=get_async_client(
self.hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
),
):
pass
except AqualinkServiceUnauthorizedException:

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["imgw_pib==1.6.0"]
"requirements": ["imgw_pib==2.0.1"]
}

View File

@@ -49,7 +49,7 @@
"name": "[%key:component::light::common::condition_behavior_name%]"
}
},
"name": "If a light is off"
"name": "Light is off"
},
"is_on": {
"description": "Tests if one or more lights are on.",
@@ -59,7 +59,7 @@
"name": "[%key:component::light::common::condition_behavior_name%]"
}
},
"name": "If a light is on"
"name": "Light is on"
}
},
"device_automation": {

View File

@@ -2,6 +2,7 @@
from dataclasses import dataclass
from http import HTTPStatus
import logging
import aiohttp
from microBeesPy import MicroBees
@@ -15,6 +16,8 @@ from homeassistant.helpers import config_entry_oauth2_flow
from .const import DOMAIN, PLATFORMS
from .coordinator import MicroBeesUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
@dataclass(frozen=True, kw_only=True)
class HomeAssistantMicroBeesData:
@@ -25,6 +28,23 @@ class HomeAssistantMicroBeesData:
session: config_entry_oauth2_flow.OAuth2Session
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Migrate entry."""
_LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
if entry.version == 1:
# 1 -> 1.2: Unique ID from integer to string
if entry.minor_version == 1:
minor_version = 2
hass.config_entries.async_update_entry(
entry, unique_id=str(entry.unique_id), minor_version=minor_version
)
_LOGGER.debug("Migration successful")
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up microBees from a config entry."""
implementation = (

View File

@@ -19,6 +19,8 @@ class OAuth2FlowHandler(
"""Handle a config flow for microBees."""
DOMAIN = DOMAIN
VERSION = 1
MINOR_VERSION = 2
@property
def logger(self) -> logging.Logger:
@@ -47,7 +49,7 @@ class OAuth2FlowHandler(
self.logger.exception("Unexpected error")
return self.async_abort(reason="unknown")
await self.async_set_unique_id(current_user.id)
await self.async_set_unique_id(str(current_user.id))
if self.source != SOURCE_REAUTH:
self._abort_if_unique_id_configured()
return self.async_create_entry(

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
@@ -15,9 +17,28 @@ from .api import AuthenticatedMonzoAPI
from .const import DOMAIN
from .coordinator import MonzoCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Migrate entry."""
_LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
if entry.version == 1:
# 1 -> 1.2: Unique ID from integer to string
if entry.minor_version == 1:
minor_version = 2
hass.config_entries.async_update_entry(
entry, unique_id=str(entry.unique_id), minor_version=minor_version
)
_LOGGER.debug("Migration successful")
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Monzo from a config entry."""
implementation = await async_get_config_entry_implementation(hass, entry)

View File

@@ -21,6 +21,8 @@ class MonzoFlowHandler(
"""Handle a config flow."""
DOMAIN = DOMAIN
VERSION = 1
MINOR_VERSION = 2
oauth_data: dict[str, Any]
@@ -51,7 +53,7 @@ class MonzoFlowHandler(
"""Create an entry for the flow."""
self.oauth_data = data
user_id = data[CONF_TOKEN]["user_id"]
await self.async_set_unique_id(user_id)
await self.async_set_unique_id(str(user_id))
if self.source != SOURCE_REAUTH:
self._abort_if_unique_id_configured()
else:

View File

@@ -43,6 +43,7 @@ ATTR_ICON = "icon"
ATTR_MARKDOWN = "markdown"
ATTR_PRIORITY = "priority"
ATTR_TAGS = "tags"
ATTR_SEQUENCE_ID = "sequence_id"
SERVICE_PUBLISH_SCHEMA = cv.make_entity_service_schema(
{
@@ -60,6 +61,7 @@ SERVICE_PUBLISH_SCHEMA = cv.make_entity_service_schema(
vol.Optional(ATTR_EMAIL): vol.Email(),
vol.Optional(ATTR_CALL): cv.string,
vol.Optional(ATTR_ICON): vol.All(vol.Url(), vol.Coerce(URL)),
vol.Optional(ATTR_SEQUENCE_ID): cv.string,
}
)

View File

@@ -88,3 +88,8 @@ publish:
type: url
autocomplete: url
example: https://example.org/logo.png
sequence_id:
required: false
selector:
text:
example: "Mc3otamDNcpJ"

View File

@@ -1,6 +1,7 @@
{
"common": {
"add_topic_description": "Set up a topic for notifications.",
"sequence_id": "Sequence ID",
"topic": "Topic"
},
"config": {
@@ -171,6 +172,9 @@
"icon": { "name": "Icon" },
"message": { "name": "Message" },
"priority": { "name": "Priority" },
"sequence_id": {
"name": "[%key:component::ntfy::common::sequence_id%]"
},
"tags": { "name": "Tags" },
"time": { "name": "Time" },
"title": { "name": "Title" },
@@ -356,6 +360,10 @@
"description": "All messages have a priority that defines how urgently your phone notifies you, depending on the configured vibration patterns, notification sounds, and visibility in the notification drawer or pop-over.",
"name": "Message priority"
},
"sequence_id": {
"description": "Enter a message or sequence ID to update an existing notification, or specify a sequence ID to reference later when updating, clearing (mark as read and dismiss), or deleting a notification.",
"name": "[%key:component::ntfy::common::sequence_id%]"
},
"tags": {
"description": "Add tags or emojis to the notification. Emojis (using shortcodes like smile) will appear in the notification title or message. Other tags will be displayed below the notification content.",
"name": "Tags/Emojis"

View File

@@ -253,7 +253,7 @@ class NumberDeviceClass(StrEnum):
NITROGEN_MONOXIDE = "nitrogen_monoxide"
"""Amount of NO.
Unit of measurement: `μg/m³`
Unit of measurement: `ppb` (parts per billion), `μg/m³`
"""
NITROUS_OXIDE = "nitrous_oxide"
@@ -521,7 +521,10 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.NITROGEN_MONOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.NITROGEN_MONOXIDE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.OZONE: {
CONCENTRATION_PARTS_PER_BILLION,

View File

@@ -7,6 +7,9 @@
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"unknown_license_plate": "Unknown license plate"
},
"initiate_flow": {
"user": "Add vehicle"
},
"step": {
"user": {
"data": {

View File

@@ -60,6 +60,7 @@ from homeassistant.util.unit_conversion import (
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
@@ -228,6 +229,7 @@ _PRIMARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
_SECONDARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
CarbonMonoxideConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
SulphurDioxideConcentrationConverter,
TemperatureDeltaConverter,

View File

@@ -34,6 +34,7 @@ from homeassistant.util.unit_conversion import (
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
@@ -94,6 +95,9 @@ UNIT_SCHEMA = vol.Schema(
vol.Optional("nitrogen_dioxide"): vol.In(
NitrogenDioxideConcentrationConverter.VALID_UNITS
),
vol.Optional("nitrogen_monoxide"): vol.In(
NitrogenMonoxideConcentrationConverter.VALID_UNITS
),
vol.Optional("ozone"): vol.In(OzoneConcentrationConverter.VALID_UNITS),
vol.Optional("power"): vol.In(PowerConverter.VALID_UNITS),
vol.Optional("pressure"): vol.In(PressureConverter.VALID_UNITS),

View File

@@ -4,6 +4,7 @@
"codeowners": ["@tomaszsluszniak"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/sanix",
"integration_type": "device",
"iot_class": "cloud_polling",
"requirements": ["sanix==1.0.6"]
}

View File

@@ -13,6 +13,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/screenlogic",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["screenlogicpy"],
"requirements": ["screenlogicpy==0.10.2"]

View File

@@ -64,6 +64,7 @@ from homeassistant.util.unit_conversion import (
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
NitrogenMonoxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
@@ -291,7 +292,7 @@ class SensorDeviceClass(StrEnum):
NITROGEN_MONOXIDE = "nitrogen_monoxide"
"""Amount of NO.
Unit of measurement: `μg/m³`
Unit of measurement: `ppb` (parts per billion), `μg/m³`
"""
NITROUS_OXIDE = "nitrous_oxide"
@@ -566,6 +567,7 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] =
SensorDeviceClass.ENERGY_STORAGE: EnergyConverter,
SensorDeviceClass.GAS: VolumeConverter,
SensorDeviceClass.NITROGEN_DIOXIDE: NitrogenDioxideConcentrationConverter,
SensorDeviceClass.NITROGEN_MONOXIDE: NitrogenMonoxideConcentrationConverter,
SensorDeviceClass.OZONE: OzoneConcentrationConverter,
SensorDeviceClass.POWER: PowerConverter,
SensorDeviceClass.POWER_FACTOR: UnitlessRatioConverter,
@@ -639,7 +641,10 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.NITROGEN_MONOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.NITROGEN_MONOXIDE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.OZONE: {
CONCENTRATION_PARTS_PER_BILLION,
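
With the converter registered and ppb accepted as a native unit, an NO sensor reporting in ppb can feed statistics and be displayed in µg/m³. A hedged sketch of checking this against the sensor constants (module paths and the converter's supported units are assumed from the hunks above):

from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.components.sensor.const import DEVICE_CLASS_UNITS, UNIT_CONVERTERS
from homeassistant.const import (
    CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    CONCENTRATION_PARTS_PER_BILLION,
)

allowed_units = DEVICE_CLASS_UNITS[SensorDeviceClass.NITROGEN_MONOXIDE]
print(CONCENTRATION_PARTS_PER_BILLION in allowed_units)  # True after this change

converter = UNIT_CONVERTERS[SensorDeviceClass.NITROGEN_MONOXIDE]
print(converter.convert(50, CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER))
# roughly 61 µg/m³ at the ambient reference conditions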

View File

@@ -49,8 +49,8 @@ DEFAULT_NAME = "Template Select"
SELECT_COMMON_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_OPTIONS): cv.template,
vol.Optional(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
vol.Required(ATTR_OPTIONS): cv.template,
vol.Required(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_STATE): cv.template,
}
)
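
A hedged voluptuous sketch of what tightening these keys means for config validation (simplified validators stand in for cv.template and cv.SCRIPT_SCHEMA):

import voluptuous as vol

# Simplified stand-in for SELECT_COMMON_SCHEMA with the two keys now required.
schema = vol.Schema(
    {
        vol.Required("options"): str,
        vol.Required("select_option"): list,
        vol.Optional("state"): str,
    }
)

schema({"options": "{{ ['low', 'high'] }}", "select_option": []})  # validates
try:
    schema({"options": "{{ ['low', 'high'] }}"})  # select_option omitted
except vol.Invalid as err:
    print(err)  # required key not provided @ data['select_option']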

View File

@@ -8,7 +8,6 @@ import logging
import aiohttp
from aiohttp.client_exceptions import ClientError, ClientResponseError
import tibber
from tibber import data_api as tibber_data_api
from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import Event, HomeAssistant
@@ -23,13 +22,7 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util, ssl as ssl_util
from .const import (
AUTH_IMPLEMENTATION,
CONF_LEGACY_ACCESS_TOKEN,
DATA_HASS_CONFIG,
DOMAIN,
TibberConfigEntry,
)
from .const import AUTH_IMPLEMENTATION, DATA_HASS_CONFIG, DOMAIN, TibberConfigEntry
from .coordinator import TibberDataAPICoordinator
from .services import async_setup_services
@@ -44,24 +37,23 @@ _LOGGER = logging.getLogger(__name__)
class TibberRuntimeData:
"""Runtime data for Tibber API entries."""
tibber_connection: tibber.Tibber
session: OAuth2Session
data_api_coordinator: TibberDataAPICoordinator | None = field(default=None)
_client: tibber_data_api.TibberDataAPI | None = None
_client: tibber.Tibber | None = None
async def async_get_client(
self, hass: HomeAssistant
) -> tibber_data_api.TibberDataAPI:
"""Return an authenticated Tibber Data API client."""
async def async_get_client(self, hass: HomeAssistant) -> tibber.Tibber:
"""Return an authenticated Tibber client."""
await self.session.async_ensure_token_valid()
token = self.session.token
access_token = token.get(CONF_ACCESS_TOKEN)
if not access_token:
raise ConfigEntryAuthFailed("Access token missing from OAuth session")
if self._client is None:
self._client = tibber_data_api.TibberDataAPI(
access_token,
self._client = tibber.Tibber(
access_token=access_token,
websession=async_get_clientsession(hass),
time_zone=dt_util.get_default_time_zone(),
ssl=ssl_util.get_default_context(),
)
self._client.set_access_token(access_token)
return self._client
@@ -88,32 +80,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bo
translation_key="data_api_reauth_required",
)
tibber_connection = tibber.Tibber(
access_token=entry.data[CONF_LEGACY_ACCESS_TOKEN],
websession=async_get_clientsession(hass),
time_zone=dt_util.get_default_time_zone(),
ssl=ssl_util.get_default_context(),
)
async def _close(event: Event) -> None:
await tibber_connection.rt_disconnect()
entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))
try:
await tibber_connection.update_info()
except (
TimeoutError,
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
) as err:
raise ConfigEntryNotReady("Unable to connect") from err
except tibber.InvalidLoginError as exp:
_LOGGER.error("Failed to login. %s", exp)
return False
except tibber.FatalHttpExceptionError:
return False
try:
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
@@ -135,10 +101,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bo
raise ConfigEntryNotReady from err
entry.runtime_data = TibberRuntimeData(
tibber_connection=tibber_connection,
session=session,
)
tibber_connection = await entry.runtime_data.async_get_client(hass)
async def _close(event: Event) -> None:
await tibber_connection.rt_disconnect()
entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))
try:
await tibber_connection.update_info()
except (
TimeoutError,
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
) as err:
raise ConfigEntryNotReady("Unable to connect") from err
except tibber.InvalidLoginError as err:
raise ConfigEntryAuthFailed("Invalid login credentials") from err
except tibber.FatalHttpExceptionError as err:
raise ConfigEntryNotReady("Fatal HTTP error from Tibber API") from err
coordinator = TibberDataAPICoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data.data_api_coordinator = coordinator
@@ -154,5 +139,6 @@ async def async_unload_entry(
if unload_ok := await hass.config_entries.async_unload_platforms(
config_entry, PLATFORMS
):
await config_entry.runtime_data.tibber_connection.rt_disconnect()
tibber_connection = await config_entry.runtime_data.async_get_client(hass)
await tibber_connection.rt_disconnect()
return unload_ok
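
With the legacy token gone, every consumer goes through the runtime data's lazily created, token-refreshing client, as the diagnostics, notify, sensor, and services hunks below show. A hedged sketch of the new access pattern from a platform (entity creation elided):

# Illustrative platform setup, not part of this diff.
async def async_setup_entry(hass, entry, async_add_entities):
    tibber_connection = await entry.runtime_data.async_get_client(hass)
    for home in tibber_connection.get_homes(only_active=True):
        ...  # build entities for each home

    # Data API devices are reached through the same client (see the coordinator hunk below).
    devices = await tibber_connection.data_api.get_all_devices()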

View File

@@ -8,21 +8,16 @@ from typing import Any
import aiohttp
import tibber
from tibber import data_api as tibber_data_api
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigFlowResult
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
from .const import CONF_LEGACY_ACCESS_TOKEN, DATA_API_DEFAULT_SCOPES, DOMAIN
from .const import DATA_API_DEFAULT_SCOPES, DOMAIN
DATA_SCHEMA = vol.Schema({vol.Required(CONF_LEGACY_ACCESS_TOKEN): str})
ERR_TIMEOUT = "timeout"
ERR_CLIENT = "cannot_connect"
ERR_TOKEN = "invalid_access_token"
TOKEN_URL = "https://developer.tibber.com/settings/access-token"
_LOGGER = logging.getLogger(__name__)
@@ -36,8 +31,7 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self._access_token: str | None = None
self._title = ""
self._oauth_data: dict[str, Any] | None = None
@property
def logger(self) -> logging.Logger:
@@ -52,114 +46,70 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
"scope": " ".join(DATA_API_DEFAULT_SCOPES),
}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
if user_input is None:
data_schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
)
return self.async_show_form(
step_id=SOURCE_USER,
data_schema=data_schema,
description_placeholders={"url": TOKEN_URL},
errors={},
)
self._access_token = user_input[CONF_LEGACY_ACCESS_TOKEN].replace(" ", "")
tibber_connection = tibber.Tibber(
access_token=self._access_token,
websession=async_get_clientsession(self.hass),
)
self._title = tibber_connection.name or "Tibber"
errors: dict[str, str] = {}
try:
await tibber_connection.update_info()
except TimeoutError:
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TIMEOUT
except tibber.InvalidLoginError:
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TOKEN
except (
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
tibber.FatalHttpExceptionError,
):
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_CLIENT
if errors:
data_schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
)
return self.async_show_form(
step_id=SOURCE_USER,
data_schema=data_schema,
description_placeholders={"url": TOKEN_URL},
errors=errors,
)
await self.async_set_unique_id(tibber_connection.user_id)
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
self._abort_if_unique_id_mismatch(
reason="wrong_account",
description_placeholders={"title": reauth_entry.title},
)
else:
self._abort_if_unique_id_configured()
return await self.async_step_pick_implementation()
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle a reauth flow."""
reauth_entry = self._get_reauth_entry()
self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
self._title = reauth_entry.title
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication by reusing the user step."""
reauth_entry = self._get_reauth_entry()
self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
self._title = reauth_entry.title
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
)
return self.async_show_form(step_id="reauth_confirm")
return await self.async_step_user()
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Finalize the OAuth flow and create the config entry."""
if self._access_token is None:
return self.async_abort(reason="missing_configuration")
self._oauth_data = data
return await self._async_validate_and_create()
data[CONF_LEGACY_ACCESS_TOKEN] = self._access_token
async def async_step_connection_error(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle connection error retry."""
if user_input is not None:
return await self._async_validate_and_create()
return self.async_show_form(step_id="connection_error")
access_token = data[CONF_TOKEN][CONF_ACCESS_TOKEN]
data_api_client = tibber_data_api.TibberDataAPI(
access_token,
async def _async_validate_and_create(self) -> ConfigFlowResult:
"""Validate the OAuth token and create the config entry."""
assert self._oauth_data is not None
access_token = self._oauth_data[CONF_TOKEN][CONF_ACCESS_TOKEN]
tibber_connection = tibber.Tibber(
access_token=access_token,
websession=async_get_clientsession(self.hass),
)
try:
await data_api_client.get_userinfo()
except (aiohttp.ClientError, TimeoutError):
return self.async_abort(reason="cannot_connect")
await tibber_connection.update_info()
except TimeoutError:
return await self.async_step_connection_error()
except tibber.InvalidLoginError:
return self.async_abort(reason=ERR_TOKEN)
except (
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
):
return await self.async_step_connection_error()
except tibber.FatalHttpExceptionError:
return self.async_abort(reason=ERR_CLIENT)
await self.async_set_unique_id(tibber_connection.user_id)
title = tibber_connection.name or "Tibber"
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
self._abort_if_unique_id_mismatch(
reason="wrong_account",
description_placeholders={"title": reauth_entry.title},
)
return self.async_update_reload_and_abort(
reauth_entry,
data=data,
title=self._title,
data=self._oauth_data,
title=title,
)
return self.async_create_entry(title=self._title, data=data)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=title, data=self._oauth_data)

View File

@@ -5,7 +5,6 @@ from __future__ import annotations
from typing import TYPE_CHECKING
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
if TYPE_CHECKING:
from . import TibberRuntimeData
@@ -13,8 +12,6 @@ if TYPE_CHECKING:
type TibberConfigEntry = ConfigEntry[TibberRuntimeData]
CONF_LEGACY_ACCESS_TOKEN = CONF_ACCESS_TOKEN
AUTH_IMPLEMENTATION = "auth_implementation"
DATA_HASS_CONFIG = "tibber_hass_config"
DOMAIN = "tibber"

View File

@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, cast
from aiohttp.client_exceptions import ClientError
import tibber
from tibber.data_api import TibberDataAPI, TibberDevice
from tibber.data_api import TibberDevice
from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
@@ -230,28 +230,26 @@ class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
return device_sensors.get(sensor_id)
return None
async def _async_get_client(self) -> TibberDataAPI:
"""Get the Tibber Data API client with error handling."""
async def _async_get_client(self) -> tibber.Tibber:
"""Get the Tibber client with error handling."""
try:
return await self._runtime_data.async_get_client(self.hass)
except ConfigEntryAuthFailed:
raise
except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
raise UpdateFailed(
f"Unable to create Tibber Data API client: {err}"
) from err
raise UpdateFailed(f"Unable to create Tibber client: {err}") from err
async def _async_setup(self) -> None:
"""Initial load of Tibber Data API devices."""
client = await self._async_get_client()
devices = await client.get_all_devices()
devices = await client.data_api.get_all_devices()
self._build_sensor_lookup(devices)
async def _async_update_data(self) -> dict[str, TibberDevice]:
"""Fetch the latest device capabilities from the Tibber Data API."""
client = await self._async_get_client()
try:
devices: dict[str, TibberDevice] = await client.update_devices()
devices: dict[str, TibberDevice] = await client.data_api.update_devices()
except tibber.exceptions.RateLimitExceededError as err:
raise UpdateFailed(
f"Rate limit exceeded, retry after {err.retry_after} seconds",

View File

@@ -15,6 +15,7 @@ async def async_get_config_entry_diagnostics(
"""Return diagnostics for a config entry."""
runtime = config_entry.runtime_data
tibber_connection = await runtime.async_get_client(hass)
result: dict[str, Any] = {
"homes": [
{
@@ -24,7 +25,7 @@ async def async_get_config_entry_diagnostics(
"last_cons_data_timestamp": home.last_cons_data_timestamp,
"country": home.country,
}
for home in runtime.tibber_connection.get_homes(only_active=False)
for home in tibber_connection.get_homes(only_active=False)
]
}

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
import tibber
from homeassistant.components.notify import (
ATTR_TITLE_DEFAULT,
NotifyEntity,
@@ -37,7 +39,9 @@ class TibberNotificationEntity(NotifyEntity):
async def async_send_message(self, message: str, title: str | None = None) -> None:
"""Send a message to Tibber devices."""
tibber_connection = self._entry.runtime_data.tibber_connection
tibber_connection: tibber.Tibber = (
await self._entry.runtime_data.async_get_client(self.hass)
)
try:
await tibber_connection.send_notification(
title or ATTR_TITLE_DEFAULT, message

View File

@@ -605,7 +605,7 @@ async def _async_setup_graphql_sensors(
) -> None:
"""Set up the Tibber sensor."""
tibber_connection = entry.runtime_data.tibber_connection
tibber_connection = await entry.runtime_data.async_get_client(hass)
entity_registry = er.async_get(hass)

View File

@@ -42,7 +42,7 @@ async def __get_prices(call: ServiceCall) -> ServiceResponse:
translation_domain=DOMAIN,
translation_key="no_config_entry",
)
tibber_connection = entries[0].runtime_data.tibber_connection
tibber_connection = await entries[0].runtime_data.async_get_client(call.hass)
start = __get_date(call.data.get(ATTR_START), "start")
end = __get_date(call.data.get(ATTR_END), "end")

View File

@@ -2,26 +2,21 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]",
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"wrong_account": "The connected account does not match {title}. Sign in with the same Tibber account and try again."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]",
"timeout": "[%key:common::config_flow::error::timeout_connect%]"
},
"step": {
"connection_error": {
"description": "Could not connect to Tibber. Check your internet connection and try again.",
"title": "Connection failed"
},
"reauth_confirm": {
"description": "Reconnect your Tibber account to refresh access.",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]"
},
"description": "Enter your access token from {url}"
}
}
},

View File

@@ -83,6 +83,14 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
return False
if entry.version == 2:
# 2 -> 2.2: Unique ID from integer to string
if entry.minor_version == 1:
minor_version = 2
hass.config_entries.async_update_entry(
entry, unique_id=str(entry.unique_id), minor_version=minor_version
)
return True

View File

@@ -20,6 +20,7 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
DOMAIN = DOMAIN
VERSION = 2
MINOR_VERSION = 2
agreements: list[Agreement]
data: dict[str, Any]
@@ -92,7 +93,7 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
if self.migrate_entry:
await self.hass.config_entries.async_remove(self.migrate_entry)
await self.async_set_unique_id(agreement.agreement_id)
await self.async_set_unique_id(str(agreement.agreement_id))
self._abort_if_unique_id_configured()
self.data[CONF_AGREEMENT_ID] = agreement.agreement_id

View File

@@ -8,6 +8,7 @@ import dataclasses
from uiprotect.data import (
NVR,
Camera,
Event,
ModelType,
MountType,
ProtectAdoptableDeviceModel,
@@ -644,6 +645,31 @@ class ProtectEventBinarySensor(EventEntityMixin, BinarySensorEntity):
self._attr_is_on = False
self._attr_extra_state_attributes = {}
@callback
def _find_active_event_with_object_type(
self, device: ProtectDeviceType
) -> Event | None:
"""Find an active event containing this sensor's object type.
Fallback for issue #152133: last_smart_detect_event_ids may not update
immediately when a new detection type is added to an ongoing event.
"""
obj_type = self.entity_description.ufp_obj_type
if obj_type is None or not isinstance(device, Camera):
return None
# Check known active event IDs from camera first (fast path)
for event_id in device.last_smart_detect_event_ids.values():
if (
event_id
and (event := self.data.api.bootstrap.events.get(event_id))
and event.end is None
and obj_type in event.smart_detect_types
):
return event
return None
@callback
def _async_update_device_from_protect(self, device: ProtectDeviceType) -> None:
description = self.entity_description
@@ -651,9 +677,15 @@ class ProtectEventBinarySensor(EventEntityMixin, BinarySensorEntity):
prev_event = self._event
prev_event_end = self._event_end
super()._async_update_device_from_protect(device)
if event := description.get_event_obj(device):
event = description.get_event_obj(device)
if event is None:
# Fallback for #152133: check active events directly
event = self._find_active_event_with_object_type(device)
if event:
self._event = event
self._event_end = event.end if event else None
self._event_end = event.end
if not (
event

View File

@@ -41,7 +41,7 @@
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"quality_scale": "platinum",
"requirements": ["uiprotect==10.0.0", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==10.0.1", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

View File

@@ -25,5 +25,5 @@
"documentation": "https://www.home-assistant.io/integrations/xiaomi_ble",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["xiaomi-ble==1.5.0"]
"requirements": ["xiaomi-ble==1.6.0"]
}

View File

@@ -5824,7 +5824,7 @@
},
"sanix": {
"name": "Sanix",
"integration_type": "hub",
"integration_type": "device",
"config_flow": true,
"iot_class": "cloud_polling"
},

View File

@@ -370,9 +370,13 @@ def _async_get_connector(
return connectors[connector_key]
if verify_ssl:
ssl_context: SSLContext = ssl_util.client_context(ssl_cipher)
ssl_context: SSLContext = ssl_util.client_context(
ssl_cipher, ssl_util.SSL_ALPN_HTTP11
)
else:
ssl_context = ssl_util.client_context_no_verify(ssl_cipher)
ssl_context = ssl_util.client_context_no_verify(
ssl_cipher, ssl_util.SSL_ALPN_HTTP11
)
connector = HomeAssistantTCPConnector(
family=family,

View File

@@ -17,6 +17,9 @@ from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.loader import bind_hass
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.ssl import (
SSL_ALPN_HTTP11,
SSL_ALPN_HTTP11_HTTP2,
SSLALPNProtocols,
SSLCipherList,
client_context,
create_no_verify_ssl_context,
@@ -28,9 +31,9 @@ from .frame import warn_use
# and we want to keep the connection open for a while so we
# don't have to reconnect every time so we use 15s to match aiohttp.
KEEP_ALIVE_TIMEOUT = 15
DATA_ASYNC_CLIENT: HassKey[httpx.AsyncClient] = HassKey("httpx_async_client")
DATA_ASYNC_CLIENT_NOVERIFY: HassKey[httpx.AsyncClient] = HassKey(
"httpx_async_client_noverify"
# Shared httpx clients keyed by (verify_ssl, alpn_protocols)
DATA_ASYNC_CLIENT: HassKey[dict[tuple[bool, SSLALPNProtocols], httpx.AsyncClient]] = (
HassKey("httpx_async_client")
)
DEFAULT_LIMITS = limits = httpx.Limits(keepalive_expiry=KEEP_ALIVE_TIMEOUT)
SERVER_SOFTWARE = (
@@ -42,15 +45,26 @@ USER_AGENT = "User-Agent"
@callback
@bind_hass
def get_async_client(hass: HomeAssistant, verify_ssl: bool = True) -> httpx.AsyncClient:
def get_async_client(
hass: HomeAssistant,
verify_ssl: bool = True,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
) -> httpx.AsyncClient:
"""Return default httpx AsyncClient.
This method must be run in the event loop.
"""
key = DATA_ASYNC_CLIENT if verify_ssl else DATA_ASYNC_CLIENT_NOVERIFY
if (client := hass.data.get(key)) is None:
client = hass.data[key] = create_async_httpx_client(hass, verify_ssl)
Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 to get a client configured for HTTP/2.
Clients are cached separately by ALPN protocol to ensure proper SSL context
configuration (ALPN protocols differ between HTTP versions).
"""
client_key = (verify_ssl, alpn_protocols)
clients = hass.data.setdefault(DATA_ASYNC_CLIENT, {})
if (client := clients.get(client_key)) is None:
client = clients[client_key] = create_async_httpx_client(
hass, verify_ssl, alpn_protocols=alpn_protocols
)
return client
@@ -77,6 +91,7 @@ def create_async_httpx_client(
verify_ssl: bool = True,
auto_cleanup: bool = True,
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
**kwargs: Any,
) -> httpx.AsyncClient:
"""Create a new httpx.AsyncClient with kwargs, i.e. for cookies.
@@ -84,13 +99,22 @@ def create_async_httpx_client(
If auto_cleanup is False, the client will be
automatically closed on homeassistant_stop.
Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 for HTTP/2 support (automatically
enables httpx http2 mode).
This method must be run in the event loop.
"""
# Use the requested ALPN protocols directly to ensure proper SSL context
# bucketing. httpx/httpcore mutates SSL contexts by calling set_alpn_protocols(),
# so we pre-set the correct protocols to prevent shared context corruption.
ssl_context = (
client_context(ssl_cipher_list)
client_context(ssl_cipher_list, alpn_protocols)
if verify_ssl
else create_no_verify_ssl_context(ssl_cipher_list)
else create_no_verify_ssl_context(ssl_cipher_list, alpn_protocols)
)
# Enable httpx HTTP/2 mode when HTTP/2 protocol is requested
if alpn_protocols == SSL_ALPN_HTTP11_HTTP2:
kwargs.setdefault("http2", True)
client = HassHttpXAsyncClient(
verify=ssl_context,
headers={USER_AGENT: SERVER_SOFTWARE},
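
A hedged sketch of the resulting caching behavior (run from the event loop, with hass assumed in scope):

from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2

default_client = get_async_client(hass)  # HTTP/1.1 bucket, as before
h2_client = get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)  # http2=True under the hood

assert default_client is get_async_client(hass)  # same bucket -> same cached client
assert default_client is not h2_client           # different ALPN bucket -> separate client and SSL context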

View File

@@ -8,6 +8,17 @@ import ssl
import certifi
# Type alias for ALPN protocols tuple (None means no ALPN protocols set)
type SSLALPNProtocols = tuple[str, ...] | None
# ALPN protocol configurations
# No ALPN protocols - used for libraries that don't support/need ALPN (e.g., aioimap)
SSL_ALPN_NONE: SSLALPNProtocols = None
# HTTP/1.1 only - used by default and for aiohttp (which doesn't support HTTP/2)
SSL_ALPN_HTTP11: SSLALPNProtocols = ("http/1.1",)
# HTTP/1.1 with HTTP/2 support - used when httpx http2=True
SSL_ALPN_HTTP11_HTTP2: SSLALPNProtocols = ("http/1.1", "h2")
class SSLCipherList(StrEnum):
"""SSL cipher lists."""
@@ -64,7 +75,10 @@ SSL_CIPHER_LISTS = {
@cache
def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
def _client_context_no_verify(
ssl_cipher_list: SSLCipherList,
alpn_protocols: SSLALPNProtocols,
) -> ssl.SSLContext:
# This is a copy of aiohttp's create_default_context() function, with the
# ssl verify turned off.
# https://github.com/aio-libs/aiohttp/blob/33953f110e97eecc707e1402daa8d543f38a189b/aiohttp/connector.py#L911
@@ -78,12 +92,18 @@ def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
sslcontext.set_default_verify_paths()
if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
# Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
# from mutating the shared SSL context with different protocol settings.
# If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
if alpn_protocols is not None:
sslcontext.set_alpn_protocols(list(alpn_protocols))
return sslcontext
def _create_client_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return an independent SSL context for making requests."""
# Reuse environment variable definition from requests, since it's already a
@@ -96,6 +116,11 @@ def _create_client_context(
)
if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
# Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
# from mutating the shared SSL context with different protocol settings.
# If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
if alpn_protocols is not None:
sslcontext.set_alpn_protocols(list(alpn_protocols))
return sslcontext
@@ -103,63 +128,63 @@ def _create_client_context(
@cache
def _client_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
# Cached version of _create_client_context
return _create_client_context(ssl_cipher_list)
return _create_client_context(ssl_cipher_list, alpn_protocols)
# Create this only once and reuse it
_DEFAULT_SSL_CONTEXT = _client_context(SSLCipherList.PYTHON_DEFAULT)
_DEFAULT_NO_VERIFY_SSL_CONTEXT = _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT)
_NO_VERIFY_SSL_CONTEXTS = {
SSLCipherList.INTERMEDIATE: _client_context_no_verify(SSLCipherList.INTERMEDIATE),
SSLCipherList.MODERN: _client_context_no_verify(SSLCipherList.MODERN),
SSLCipherList.INSECURE: _client_context_no_verify(SSLCipherList.INSECURE),
}
_SSL_CONTEXTS = {
SSLCipherList.INTERMEDIATE: _client_context(SSLCipherList.INTERMEDIATE),
SSLCipherList.MODERN: _client_context(SSLCipherList.MODERN),
SSLCipherList.INSECURE: _client_context(SSLCipherList.INSECURE),
}
# Pre-warm the cache for ALL SSL context configurations at module load time.
# This is critical because creating SSL contexts loads certificates from disk,
# which is blocking I/O that must not happen in the event loop.
_SSL_ALPN_PROTOCOLS = (SSL_ALPN_NONE, SSL_ALPN_HTTP11, SSL_ALPN_HTTP11_HTTP2)
for _cipher in SSLCipherList:
for _alpn in _SSL_ALPN_PROTOCOLS:
_client_context(_cipher, _alpn)
_client_context_no_verify(_cipher, _alpn)
def get_default_context() -> ssl.SSLContext:
"""Return the default SSL context."""
return _DEFAULT_SSL_CONTEXT
return _client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)
def get_default_no_verify_context() -> ssl.SSLContext:
"""Return the default SSL context that does not verify the server certificate."""
return _DEFAULT_NO_VERIFY_SSL_CONTEXT
return _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)
def client_context_no_verify(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return a SSL context with no verification with a specific ssl cipher."""
return _NO_VERIFY_SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_NO_VERIFY_SSL_CONTEXT)
return _client_context_no_verify(ssl_cipher_list, alpn_protocols)
def client_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return an SSL context for making requests."""
return _SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_SSL_CONTEXT)
return _client_context(ssl_cipher_list, alpn_protocols)
def create_client_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return an independent SSL context for making requests."""
# This explicitly uses the non-cached version to create a client context
return _create_client_context(ssl_cipher_list)
return _create_client_context(ssl_cipher_list, alpn_protocols)
def create_no_verify_ssl_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return an SSL context that does not verify the server certificate."""
return _client_context_no_verify(ssl_cipher_list)
return _client_context_no_verify(ssl_cipher_list, alpn_protocols)
def server_context_modern() -> ssl.SSLContext:
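A minimal sketch of the caching pattern the hunk above relies on (names are illustrative): functools.cache keys a context on the (cipher list, ALPN tuple) pair, which is why the ALPN argument is a hashable tuple rather than a list, and enumerating every pair at import time keeps the blocking certificate loading out of the event loop.

from enum import StrEnum
from functools import cache
import ssl

class CipherList(StrEnum):  # illustrative stand-in for SSLCipherList
    PYTHON_DEFAULT = "python_default"
    MODERN = "modern"

@cache
def cached_context(cipher: CipherList, alpn: tuple[str, ...] | None) -> ssl.SSLContext:
    """One context per (cipher, ALPN) pair; the tuple keeps the key hashable."""
    ctx = ssl.create_default_context()
    if alpn is not None:
        ctx.set_alpn_protocols(list(alpn))
    return ctx

# Pre-warm every combination at import time so the blocking disk I/O of
# loading CA certificates never happens inside the event loop.
for cipher in CipherList:
    for alpn in (None, ("http/1.1",), ("http/1.1", "h2")):
        cached_context(cipher, alpn)

# Repeated lookups return the same object thanks to the cache.
assert cached_context(CipherList.MODERN, ("http/1.1",)) is cached_context(
    CipherList.MODERN, ("http/1.1",)
)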

View File

@@ -104,6 +104,7 @@ _AMBIENT_IDEAL_GAS_MOLAR_VOLUME = ( # m3⋅mol⁻¹
# Molar masses in g⋅mol⁻¹
_CARBON_MONOXIDE_MOLAR_MASS = 28.01
_NITROGEN_DIOXIDE_MOLAR_MASS = 46.0055
_NITROGEN_MONOXIDE_MOLAR_MASS = 30.0061
_OZONE_MOLAR_MASS = 48.00
_SULPHUR_DIOXIDE_MOLAR_MASS = 64.066
@@ -188,6 +189,52 @@ class BaseUnitConverter:
return (from_unit in cls._UNIT_INVERSES) != (to_unit in cls._UNIT_INVERSES)
class ApparentPowerConverter(BaseUnitConverter):
"""Utility to convert apparent power values."""
UNIT_CLASS = "apparent_power"
_UNIT_CONVERSION: dict[str | None, float] = {
UnitOfApparentPower.MILLIVOLT_AMPERE: 1 * 1000,
UnitOfApparentPower.VOLT_AMPERE: 1,
UnitOfApparentPower.KILO_VOLT_AMPERE: 1 / 1000,
}
VALID_UNITS = {
UnitOfApparentPower.MILLIVOLT_AMPERE,
UnitOfApparentPower.VOLT_AMPERE,
UnitOfApparentPower.KILO_VOLT_AMPERE,
}
class AreaConverter(BaseUnitConverter):
"""Utility to convert area values."""
UNIT_CLASS = "area"
_UNIT_CONVERSION: dict[str | None, float] = {
UnitOfArea.SQUARE_METERS: 1,
UnitOfArea.SQUARE_CENTIMETERS: 1 / _CM2_TO_M2,
UnitOfArea.SQUARE_MILLIMETERS: 1 / _MM2_TO_M2,
UnitOfArea.SQUARE_KILOMETERS: 1 / _KM2_TO_M2,
UnitOfArea.SQUARE_INCHES: 1 / _IN2_TO_M2,
UnitOfArea.SQUARE_FEET: 1 / _FT2_TO_M2,
UnitOfArea.SQUARE_YARDS: 1 / _YD2_TO_M2,
UnitOfArea.SQUARE_MILES: 1 / _MI2_TO_M2,
UnitOfArea.ACRES: 1 / _ACRE_TO_M2,
UnitOfArea.HECTARES: 1 / _HECTARE_TO_M2,
}
VALID_UNITS = set(UnitOfArea)
class BloodGlucoseConcentrationConverter(BaseUnitConverter):
"""Utility to convert blood glucose concentration values."""
UNIT_CLASS = "blood_glucose_concentration"
_UNIT_CONVERSION: dict[str | None, float] = {
UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18,
UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1,
}
VALID_UNITS = set(UnitOfBloodGlucoseConcentration)
class CarbonMonoxideConcentrationConverter(BaseUnitConverter):
"""Convert carbon monoxide ratio to mass per volume.
@@ -213,36 +260,16 @@ class CarbonMonoxideConcentrationConverter(BaseUnitConverter):
}
class NitrogenDioxideConcentrationConverter(BaseUnitConverter):
"""Convert nitrogen dioxide ratio to mass per volume."""
class ConductivityConverter(BaseUnitConverter):
"""Utility to convert electric current values."""
UNIT_CLASS = "nitrogen_dioxide"
UNIT_CLASS = "conductivity"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_NITROGEN_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
class SulphurDioxideConcentrationConverter(BaseUnitConverter):
"""Convert sulphur dioxide ratio to mass per volume."""
UNIT_CLASS = "sulphur_dioxide"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_SULPHUR_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
UnitOfConductivity.MICROSIEMENS_PER_CM: 1,
UnitOfConductivity.MILLISIEMENS_PER_CM: 1e-3,
UnitOfConductivity.SIEMENS_PER_CM: 1e-6,
}
VALID_UNITS = set(UnitOfConductivity)
class DataRateConverter(BaseUnitConverter):
@@ -266,25 +293,6 @@ class DataRateConverter(BaseUnitConverter):
VALID_UNITS = set(UnitOfDataRate)
class AreaConverter(BaseUnitConverter):
"""Utility to convert area values."""
UNIT_CLASS = "area"
_UNIT_CONVERSION: dict[str | None, float] = {
UnitOfArea.SQUARE_METERS: 1,
UnitOfArea.SQUARE_CENTIMETERS: 1 / _CM2_TO_M2,
UnitOfArea.SQUARE_MILLIMETERS: 1 / _MM2_TO_M2,
UnitOfArea.SQUARE_KILOMETERS: 1 / _KM2_TO_M2,
UnitOfArea.SQUARE_INCHES: 1 / _IN2_TO_M2,
UnitOfArea.SQUARE_FEET: 1 / _FT2_TO_M2,
UnitOfArea.SQUARE_YARDS: 1 / _YD2_TO_M2,
UnitOfArea.SQUARE_MILES: 1 / _MI2_TO_M2,
UnitOfArea.ACRES: 1 / _ACRE_TO_M2,
UnitOfArea.HECTARES: 1 / _HECTARE_TO_M2,
}
VALID_UNITS = set(UnitOfArea)
class DistanceConverter(BaseUnitConverter):
"""Utility to convert distance values."""
@@ -313,27 +321,28 @@ class DistanceConverter(BaseUnitConverter):
}
class BloodGlucoseConcentrationConverter(BaseUnitConverter):
"""Utility to convert blood glucose concentration values."""
class DurationConverter(BaseUnitConverter):
"""Utility to convert duration values."""
UNIT_CLASS = "blood_glucose_concentration"
UNIT_CLASS = "duration"
_UNIT_CONVERSION: dict[str | None, float] = {
UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18,
UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1,
UnitOfTime.MICROSECONDS: 1000000,
UnitOfTime.MILLISECONDS: 1000,
UnitOfTime.SECONDS: 1,
UnitOfTime.MINUTES: 1 / _MIN_TO_SEC,
UnitOfTime.HOURS: 1 / _HRS_TO_SECS,
UnitOfTime.DAYS: 1 / _DAYS_TO_SECS,
UnitOfTime.WEEKS: 1 / (7 * _DAYS_TO_SECS),
}
VALID_UNITS = set(UnitOfBloodGlucoseConcentration)
class ConductivityConverter(BaseUnitConverter):
"""Utility to convert electric current values."""
UNIT_CLASS = "conductivity"
_UNIT_CONVERSION: dict[str | None, float] = {
UnitOfConductivity.MICROSIEMENS_PER_CM: 1,
UnitOfConductivity.MILLISIEMENS_PER_CM: 1e-3,
UnitOfConductivity.SIEMENS_PER_CM: 1e-6,
VALID_UNITS = {
UnitOfTime.MICROSECONDS,
UnitOfTime.MILLISECONDS,
UnitOfTime.SECONDS,
UnitOfTime.MINUTES,
UnitOfTime.HOURS,
UnitOfTime.DAYS,
UnitOfTime.WEEKS,
}
VALID_UNITS = set(UnitOfConductivity)
class ElectricCurrentConverter(BaseUnitConverter):
@@ -462,19 +471,67 @@ class MassConverter(BaseUnitConverter):
}
class ApparentPowerConverter(BaseUnitConverter):
"""Utility to convert apparent power values."""
class MassVolumeConcentrationConverter(BaseUnitConverter):
"""Utility to convert mass volume concentration values."""
UNIT_CLASS = "apparent_power"
UNIT_CLASS = "concentration"
_UNIT_CONVERSION: dict[str | None, float] = {
UnitOfApparentPower.MILLIVOLT_AMPERE: 1 * 1000,
UnitOfApparentPower.VOLT_AMPERE: 1,
UnitOfApparentPower.KILO_VOLT_AMPERE: 1 / 1000,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: 1000000.0, # 1000 µg/m³ = 1 mg/m³
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: 1000.0, # 1000 mg/m³ = 1 g/m³
CONCENTRATION_GRAMS_PER_CUBIC_METER: 1.0,
}
VALID_UNITS = {
UnitOfApparentPower.MILLIVOLT_AMPERE,
UnitOfApparentPower.VOLT_AMPERE,
UnitOfApparentPower.KILO_VOLT_AMPERE,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
CONCENTRATION_GRAMS_PER_CUBIC_METER,
}
class NitrogenDioxideConcentrationConverter(BaseUnitConverter):
"""Convert nitrogen dioxide ratio to mass per volume."""
UNIT_CLASS = "nitrogen_dioxide"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_NITROGEN_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
class NitrogenMonoxideConcentrationConverter(BaseUnitConverter):
"""Convert nitrogen monoxide ratio to mass per volume."""
UNIT_CLASS = "nitrogen_monoxide"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_NITROGEN_MONOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
class OzoneConcentrationConverter(BaseUnitConverter):
"""Convert ozone ratio to mass per volume."""
UNIT_CLASS = "ozone"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_OZONE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
@@ -563,22 +620,6 @@ class ReactivePowerConverter(BaseUnitConverter):
}
class OzoneConcentrationConverter(BaseUnitConverter):
"""Convert ozone ratio to mass per volume."""
UNIT_CLASS = "ozone"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_OZONE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
class SpeedConverter(BaseUnitConverter):
"""Utility to convert speed values."""
@@ -679,6 +720,22 @@ class SpeedConverter(BaseUnitConverter):
return float(0.836 * beaufort ** (3 / 2))
class SulphurDioxideConcentrationConverter(BaseUnitConverter):
"""Convert sulphur dioxide ratio to mass per volume."""
UNIT_CLASS = "sulphur_dioxide"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_SULPHUR_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
class TemperatureConverter(BaseUnitConverter):
"""Utility to convert temperature values."""
@@ -849,22 +906,6 @@ class UnitlessRatioConverter(BaseUnitConverter):
}
class MassVolumeConcentrationConverter(BaseUnitConverter):
"""Utility to convert mass volume concentration values."""
UNIT_CLASS = "concentration"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: 1000000.0, # 1000 µg/m³ = 1 mg/m³
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: 1000.0, # 1000 mg/m³ = 1 g/m³
CONCENTRATION_GRAMS_PER_CUBIC_METER: 1.0,
}
VALID_UNITS = {
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
CONCENTRATION_GRAMS_PER_CUBIC_METER,
}
class VolumeConverter(BaseUnitConverter):
"""Utility to convert volume values."""
@@ -927,27 +968,3 @@ class VolumeFlowRateConverter(BaseUnitConverter):
UnitOfVolumeFlowRate.GALLONS_PER_DAY,
UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND,
}
class DurationConverter(BaseUnitConverter):
"""Utility to convert duration values."""
UNIT_CLASS = "duration"
_UNIT_CONVERSION: dict[str | None, float] = {
UnitOfTime.MICROSECONDS: 1000000,
UnitOfTime.MILLISECONDS: 1000,
UnitOfTime.SECONDS: 1,
UnitOfTime.MINUTES: 1 / _MIN_TO_SEC,
UnitOfTime.HOURS: 1 / _HRS_TO_SECS,
UnitOfTime.DAYS: 1 / _DAYS_TO_SECS,
UnitOfTime.WEEKS: 1 / (7 * _DAYS_TO_SECS),
}
VALID_UNITS = {
UnitOfTime.MICROSECONDS,
UnitOfTime.MILLISECONDS,
UnitOfTime.SECONDS,
UnitOfTime.MINUTES,
UnitOfTime.HOURS,
UnitOfTime.DAYS,
UnitOfTime.WEEKS,
}
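A worked example of the ppb → µg/m³ relationship the gas converters above encode: mass concentration = mixing ratio × molar mass / molar volume. The ambient molar volume used here is an assumed ballpark (~0.02445 m³·mol⁻¹ at 25 °C and 1013 hPa); the module computes its own _AMBIENT_IDEAL_GAS_MOLAR_VOLUME constant.

NO_MOLAR_MASS = 30.0061          # g/mol, matches _NITROGEN_MONOXIDE_MOLAR_MASS above
AMBIENT_MOLAR_VOLUME = 0.02445   # m³/mol, assumed value for this sketch

def no_ppb_to_ug_per_m3(ppb: float) -> float:
    """Convert a NO mixing ratio (ppb) to a mass concentration (µg/m³)."""
    # ppb is a 1e-9 volume fraction; the 1e6 factor converts g to µg.
    return ppb / 1e9 * NO_MOLAR_MASS / AMBIENT_MOLAR_VOLUME * 1e6

print(round(no_ppb_to_ug_per_m3(100), 1))  # ≈ 122.7 µg/m³ for 100 ppb of NO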

View File

@@ -676,7 +676,7 @@ exclude_lines = [
]
[tool.ruff]
required-version = ">=0.13.0"
required-version = ">=0.14.13"
[tool.ruff.lint]
select = [

requirements_all.txt generated
View File

@@ -1184,7 +1184,7 @@ hassil==3.5.0
hdate[astral]==1.1.2
# homeassistant.components.hdfury
hdfury==1.3.1
hdfury==1.4.2
# homeassistant.components.heatmiser
heatmiserV3==2.0.4
@@ -1281,7 +1281,7 @@ ihcsdk==2.8.5
imeon_inverter_api==0.4.0
# homeassistant.components.imgw_pib
imgw_pib==1.6.0
imgw_pib==2.0.1
# homeassistant.components.incomfort
incomfort-client==0.6.11
@@ -1909,7 +1909,7 @@ pyatag==0.3.5.3
pyatmo==9.2.3
# homeassistant.components.apple_tv
pyatv==0.16.1;python_version<'3.14'
pyatv==0.17.0
# homeassistant.components.aussie_broadband
pyaussiebb==0.1.5
@@ -3080,7 +3080,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
uiprotect==10.0.0
uiprotect==10.0.1
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -3215,7 +3215,7 @@ wsdot==0.0.1
wyoming==1.7.2
# homeassistant.components.xiaomi_ble
xiaomi-ble==1.5.0
xiaomi-ble==1.6.0
# homeassistant.components.knx
xknx==3.14.0

View File

@@ -1051,7 +1051,7 @@ hassil==3.5.0
hdate[astral]==1.1.2
# homeassistant.components.hdfury
hdfury==1.3.1
hdfury==1.4.2
# homeassistant.components.here_travel_time
here-routing==1.2.0
@@ -1127,7 +1127,7 @@ igloohome-api==0.1.1
imeon_inverter_api==0.4.0
# homeassistant.components.imgw_pib
imgw_pib==1.6.0
imgw_pib==2.0.1
# homeassistant.components.incomfort
incomfort-client==0.6.11
@@ -1637,7 +1637,7 @@ pyatag==0.3.5.3
pyatmo==9.2.3
# homeassistant.components.apple_tv
pyatv==0.16.1;python_version<'3.14'
pyatv==0.17.0
# homeassistant.components.aussie_broadband
pyaussiebb==0.1.5
@@ -2577,7 +2577,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
uiprotect==10.0.0
uiprotect==10.0.1
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -2691,7 +2691,7 @@ wsdot==0.0.1
wyoming==1.7.2
# homeassistant.components.xiaomi_ble
xiaomi-ble==1.5.0
xiaomi-ble==1.6.0
# homeassistant.components.knx
xknx==3.14.0

View File

@@ -1,5 +1,5 @@
# Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
codespell==2.4.1
ruff==0.13.0
ruff==0.14.13
yamllint==1.37.1

View File

@@ -26,7 +26,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.9.17,source=/uv,target=/bin/uv \
-r /usr/src/homeassistant/requirements.txt \
pipdeptree==2.26.1 \
tqdm==4.67.1 \
ruff==0.13.0
ruff==0.14.13
LABEL "name"="hassfest"
LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"

View File

@@ -117,7 +117,6 @@ FORBIDDEN_PACKAGE_EXCEPTIONS: dict[str, dict[str, set[str]]] = {
"airthings": {"airthings-cloud": {"async-timeout"}},
"ampio": {"asmog": {"async-timeout"}},
"apache_kafka": {"aiokafka": {"async-timeout"}},
"apple_tv": {"pyatv": {"async-timeout"}},
"blackbird": {
# https://github.com/koolsb/pyblackbird/issues/12
# pyblackbird > pyserial-asyncio

View File

@@ -1,9 +1,6 @@
"""Tests for Apple TV."""
import sys
import pytest
if sys.version_info < (3, 14):
# Make asserts in the common module display differences
pytest.register_assert_rewrite("tests.components.apple_tv.common")
# Make asserts in the common module display differences
pytest.register_assert_rewrite("tests.components.apple_tv.common")

View File

@@ -1,20 +1,14 @@
"""Fixtures for component."""
from collections.abc import Generator
import sys
from unittest.mock import AsyncMock, MagicMock, patch
from pyatv import conf
from pyatv.const import PairingRequirement, Protocol
from pyatv.support import http
import pytest
if sys.version_info < (3, 14):
from pyatv import conf
from pyatv.const import PairingRequirement, Protocol
from pyatv.support import http
from .common import MockPairingHandler, airplay_service, create_conf, mrp_service
if sys.version_info >= (3, 14):
collect_ignore_glob = ["test_*.py"]
from .common import MockPairingHandler, airplay_service, create_conf, mrp_service
@pytest.fixture(autouse=True, name="mock_scan")

View File

@@ -27,7 +27,10 @@ def mock_setup_entry() -> Generator[AsyncMock]:
def mock_config_entry(hass: HomeAssistant, mock_arve: MagicMock) -> MockConfigEntry:
"""Return the default mocked config entry."""
return MockConfigEntry(
title="Arve", domain=DOMAIN, data=USER_INPUT, unique_id=mock_arve.customer_id
title="Arve",
domain=DOMAIN,
data=USER_INPUT,
unique_id=str(mock_arve.customer_id),
)

View File

@@ -34,7 +34,7 @@ async def test_correct_flow(
assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["data"] == USER_INPUT
assert len(mock_setup_entry.mock_calls) == 1
assert result2["result"].unique_id == 12345
assert result2["result"].unique_id == "12345"
async def test_form_cannot_connect(

View File

@@ -0,0 +1,26 @@
"""Tests for the Arve component."""
from unittest.mock import patch
from homeassistant.components.arve.const import DOMAIN
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_SECRET
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
"""Test migrating a 1.1 config entry to 1.2."""
with patch("homeassistant.components.arve.async_setup_entry", return_value=True):
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_ACCESS_TOKEN: "mock", CONF_CLIENT_SECRET: "mock"},
version=1,
minor_version=1,
unique_id=12345,
)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
assert entry.version == 1
assert entry.minor_version == 2
assert entry.unique_id == "12345"
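The test above only exercises the minor-version bump; a minimal sketch of the migration hook integrations typically implement for the string unique_id change is shown below. The actual Arve (and microBees/Monzo) code may differ in details.

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate old config entries to the string unique_id format."""
    if entry.version == 1 and entry.minor_version == 1:
        # Unique IDs must be strings; older entries stored the id as an int.
        hass.config_entries.async_update_entry(
            entry, unique_id=str(entry.unique_id), minor_version=2
        )
    return True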

View File

@@ -27,7 +27,11 @@ async def target_fans(hass: HomeAssistant) -> list[str]:
@pytest.fixture
async def target_switches(hass: HomeAssistant) -> list[str]:
"""Create multiple switch entities associated with different targets."""
"""Create multiple switch entities associated with different targets.
Note: The switches are used to ensure that only fan entities are considered
in the condition evaluation and not other toggle entities.
"""
return (await target_entities(hass, "switch"))["included"]

View File

@@ -1 +1,13 @@
"""Tests for the GitHub integration."""
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
"""Method for setting up the component."""
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()

View File

@@ -1,53 +0,0 @@
"""Common helpers for GitHub integration tests."""
from __future__ import annotations
import json
from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, async_load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
MOCK_ACCESS_TOKEN = "gho_16C7e42F292c6912E7710c838347Ae178B4a"
TEST_REPOSITORY = "octocat/Hello-World"
async def setup_github_integration(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
aioclient_mock: AiohttpClientMocker,
add_entry_to_hass: bool = True,
) -> None:
"""Mock setting up the integration."""
headers = json.loads(await async_load_fixture(hass, "base_headers.json", DOMAIN))
for idx, repository in enumerate(mock_config_entry.options[CONF_REPOSITORIES]):
aioclient_mock.get(
f"https://api.github.com/repos/{repository}",
json={
**json.loads(await async_load_fixture(hass, "repository.json", DOMAIN)),
"full_name": repository,
"id": idx,
},
headers=headers,
)
aioclient_mock.get(
f"https://api.github.com/repos/{repository}/events",
json=[],
headers=headers,
)
aioclient_mock.post(
"https://api.github.com/graphql",
json=json.loads(await async_load_fixture(hass, "graphql.json", DOMAIN)),
headers=headers,
)
if add_entry_to_hass:
mock_config_entry.add_to_hass(hass)
setup_result = await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
assert setup_result
assert mock_config_entry.state is ConfigEntryState.LOADED

View File

@@ -1,18 +1,27 @@
"""conftest for the GitHub integration."""
import asyncio
from collections.abc import Generator
from unittest.mock import patch
from unittest.mock import AsyncMock, MagicMock, patch
from aiogithubapi import (
GitHubLoginDeviceModel,
GitHubLoginOauthModel,
GitHubRateLimitModel,
)
import pytest
from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from .common import MOCK_ACCESS_TOKEN, TEST_REPOSITORY, setup_github_integration
from .const import MOCK_ACCESS_TOKEN, TEST_REPOSITORY
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.common import (
MockConfigEntry,
async_load_json_object_fixture,
load_json_object_fixture,
)
@pytest.fixture
@@ -34,11 +43,93 @@ def mock_setup_entry() -> Generator[None]:
@pytest.fixture
async def init_integration(
def device_activation_event() -> asyncio.Event:
"""Fixture to provide an asyncio event for device activation."""
return asyncio.Event()
@pytest.fixture
def github_device_client(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
aioclient_mock: AiohttpClientMocker,
) -> MockConfigEntry:
"""Set up the GitHub integration for testing."""
await setup_github_integration(hass, mock_config_entry, aioclient_mock)
return mock_config_entry
device_activation_event: asyncio.Event,
) -> Generator[AsyncMock]:
"""Mock GitHub device client."""
with patch(
"homeassistant.components.github.config_flow.GitHubDeviceAPI",
autospec=True,
) as github_client_mock:
client = github_client_mock.return_value
register_object = AsyncMock()
register_object.data = GitHubLoginDeviceModel(
load_json_object_fixture("device_register.json", DOMAIN)
)
client.register.return_value = register_object
async def mock_api_device_activation(device_code) -> AsyncMock:
# Simulate the device activation process
await device_activation_event.wait()
activate_object = AsyncMock()
activate_object.data = GitHubLoginOauthModel(
await async_load_json_object_fixture(
hass, "device_activate.json", DOMAIN
)
)
return activate_object
client.activation = mock_api_device_activation
yield client
@pytest.fixture
def github_client(hass: HomeAssistant) -> Generator[AsyncMock]:
"""Mock GitHub device client."""
with (
patch(
"homeassistant.components.github.config_flow.GitHubAPI",
autospec=True,
) as github_client_mock,
patch("homeassistant.components.github.GitHubAPI", new=github_client_mock),
patch(
"homeassistant.components.github.diagnostics.GitHubAPI",
new=github_client_mock,
),
):
client = github_client_mock.return_value
client.user.starred = AsyncMock(
side_effect=[
MagicMock(
is_last_page=False,
next_page_number=2,
last_page_number=2,
data=[MagicMock(full_name="home-assistant/core")],
),
MagicMock(
is_last_page=True,
data=[MagicMock(full_name="home-assistant/frontend")],
),
]
)
client.user.repos = AsyncMock(
side_effect=[
MagicMock(
is_last_page=False,
next_page_number=2,
last_page_number=2,
data=[MagicMock(full_name="home-assistant/operating-system")],
),
MagicMock(
is_last_page=True,
data=[MagicMock(full_name="esphome/esphome")],
),
]
)
rate_limit_mock = AsyncMock()
rate_limit_mock.data = GitHubRateLimitModel(
load_json_object_fixture("rate_limit.json", DOMAIN)
)
client.rate_limit.return_value = rate_limit_mock
graphql_mock = AsyncMock()
graphql_mock.data = load_json_object_fixture("graphql.json", DOMAIN)
client.graphql.return_value = graphql_mock
client.repos.events.subscribe = AsyncMock()
yield client

View File

@@ -0,0 +1,4 @@
"""Constants for GitHub integration tests."""
MOCK_ACCESS_TOKEN = "gho_16C7e42F292c6912E7710c838347Ae178B4a"
TEST_REPOSITORY = "octocat/Hello-World"

View File

@@ -1,29 +0,0 @@
{
"Server": "GitHub.com",
"Date": "Mon, 1 Jan 1970 00:00:00 GMT",
"Content-Type": "application/json; charset=utf-8",
"Transfer-Encoding": "chunked",
"Cache-Control": "private, max-age=60, s-maxage=60",
"Vary": "Accept, Authorization, Cookie, X-GitHub-OTP",
"Etag": "W/\"1234567890abcdefghijklmnopqrstuvwxyz\"",
"X-OAuth-Scopes": "",
"X-Accepted-OAuth-Scopes": "",
"github-authentication-token-expiration": "1970-01-01 01:00:00 UTC",
"X-GitHub-Media-Type": "github.v3; param=raw; format=json",
"X-RateLimit-Limit": "5000",
"X-RateLimit-Remaining": "4999",
"X-RateLimit-Reset": "1",
"X-RateLimit-Used": "1",
"X-RateLimit-Resource": "core",
"Access-Control-Expose-Headers": "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, Deprecation, Sunset",
"Access-Control-Allow-Origin": "*",
"Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload",
"X-Frame-Options": "deny",
"X-Content-Type-Options": "nosniff",
"X-XSS-Protection": "0",
"Referrer-Policy": "origin-when-cross-origin, strict-origin-when-cross-origin",
"Content-Security-Policy": "default-src 'none'",
"Content-Encoding": "gzip",
"Permissions-Policy": "",
"X-GitHub-Request-Id": "12A3:45BC:6D7890:12EF34:5678G901"
}

View File

@@ -0,0 +1,5 @@
{
"access_token": "gho_16C7e42F292c6912E7710c838347Ae178B4a",
"token_type": "bearer",
"scope": ""
}

View File

@@ -0,0 +1,7 @@
{
"device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
"user_code": "WDJB-MJHT",
"verification_uri": "https://github.com/login/device",
"expires_in": 900,
"interval": 5
}

View File

@@ -0,0 +1 @@
{ "resources": { "core": { "remaining": 100, "limit": 100 } } }

View File

@@ -1,146 +1,100 @@
"""Test the GitHub config flow."""
from unittest.mock import AsyncMock, MagicMock, patch
import asyncio
from unittest.mock import AsyncMock, MagicMock
from aiogithubapi import GitHubException
from freezegun.api import FrozenDateTimeFactory
import pytest
from homeassistant import config_entries
from homeassistant.components.github.config_flow import get_repositories
from homeassistant.components.github.const import (
CONF_REPOSITORIES,
DEFAULT_REPOSITORIES,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType, UnknownFlow
from .common import MOCK_ACCESS_TOKEN
from .const import MOCK_ACCESS_TOKEN
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_full_user_flow_implementation(
hass: HomeAssistant,
mock_setup_entry: None,
aioclient_mock: AiohttpClientMocker,
freezer: FrozenDateTimeFactory,
github_device_client: AsyncMock,
github_client: AsyncMock,
device_activation_event: asyncio.Event,
) -> None:
"""Test the full manual user flow from start to finish."""
aioclient_mock.post(
"https://github.com/login/device/code",
json={
"device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
"user_code": "WDJB-MJHT",
"verification_uri": "https://github.com/login/device",
"expires_in": 900,
"interval": 5,
},
headers={"Content-Type": "application/json"},
)
# User has not yet entered the code
aioclient_mock.post(
"https://github.com/login/oauth/access_token",
json={"error": "authorization_pending"},
headers={"Content-Type": "application/json"},
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
DOMAIN, context={"source": SOURCE_USER}
)
assert result["step_id"] == "device"
assert result["type"] is FlowResultType.SHOW_PROGRESS
# User enters the code
aioclient_mock.clear_requests()
aioclient_mock.post(
"https://github.com/login/oauth/access_token",
json={
CONF_ACCESS_TOKEN: MOCK_ACCESS_TOKEN,
"token_type": "bearer",
"scope": "",
},
headers={"Content-Type": "application/json"},
)
freezer.tick(10)
device_activation_event.set()
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["step_id"] == "repositories"
assert result["type"] is FlowResultType.FORM
assert not result["errors"]
schema = result["data_schema"]
repositories = schema.schema[CONF_REPOSITORIES].options
assert len(repositories) == 4
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_REPOSITORIES: DEFAULT_REPOSITORIES,
},
result["flow_id"], user_input={CONF_REPOSITORIES: DEFAULT_REPOSITORIES}
)
assert result["title"] == ""
assert result["type"] is FlowResultType.CREATE_ENTRY
assert "data" in result
assert result["data"][CONF_ACCESS_TOKEN] == MOCK_ACCESS_TOKEN
assert "options" in result
assert result["options"][CONF_REPOSITORIES] == DEFAULT_REPOSITORIES
assert result["data"] == {CONF_ACCESS_TOKEN: MOCK_ACCESS_TOKEN}
assert result["options"] == {CONF_REPOSITORIES: DEFAULT_REPOSITORIES}
async def test_flow_with_registration_failure(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
github_device_client: AsyncMock,
) -> None:
"""Test flow with registration failure of the device."""
aioclient_mock.post(
"https://github.com/login/device/code",
exc=GitHubException("Registration failed"),
)
github_device_client.register.side_effect = GitHubException("Registration failed")
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.ABORT
assert result.get("reason") == "could_not_register"
assert result["reason"] == "could_not_register"
async def test_flow_with_activation_failure(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
freezer: FrozenDateTimeFactory,
github_device_client: AsyncMock,
device_activation_event: asyncio.Event,
) -> None:
"""Test flow with activation failure of the device."""
aioclient_mock.post(
"https://github.com/login/device/code",
json={
"device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
"user_code": "WDJB-MJHT",
"verification_uri": "https://github.com/login/device",
"expires_in": 900,
"interval": 5,
},
headers={"Content-Type": "application/json"},
)
# User has not yet entered the code
aioclient_mock.post(
"https://github.com/login/oauth/access_token",
json={"error": "authorization_pending"},
headers={"Content-Type": "application/json"},
)
async def mock_api_device_activation(device_code) -> None:
# Simulate the device activation process
await device_activation_event.wait()
raise GitHubException("Activation failed")
github_device_client.activation = mock_api_device_activation
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
DOMAIN, context={"source": SOURCE_USER}
)
assert result["step_id"] == "device"
assert result["type"] is FlowResultType.SHOW_PROGRESS
# Activation fails
aioclient_mock.clear_requests()
aioclient_mock.post(
"https://github.com/login/oauth/access_token",
exc=GitHubException("Activation failed"),
)
freezer.tick(10)
device_activation_event.set()
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
@@ -149,30 +103,14 @@ async def test_flow_with_activation_failure(
async def test_flow_with_remove_while_activating(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
hass: HomeAssistant, github_device_client: AsyncMock
) -> None:
"""Test flow with user canceling while activating."""
aioclient_mock.post(
"https://github.com/login/device/code",
json={
"device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
"user_code": "WDJB-MJHT",
"verification_uri": "https://github.com/login/device",
"expires_in": 900,
"interval": 5,
},
headers={"Content-Type": "application/json"},
)
aioclient_mock.post(
"https://github.com/login/oauth/access_token",
json={"error": "authorization_pending"},
headers={"Content-Type": "application/json"},
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
DOMAIN, context={"source": SOURCE_USER}
)
assert result["step_id"] == "device"
assert result["type"] is FlowResultType.SHOW_PROGRESS
@@ -194,84 +132,88 @@ async def test_already_configured(
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.ABORT
assert result.get("reason") == "already_configured"
assert result["reason"] == "already_configured"
async def test_starred_pagination_with_paginated_result(hass: HomeAssistant) -> None:
"""Test pagination of starred repositories with paginated result."""
with patch(
"homeassistant.components.github.config_flow.GitHubAPI",
return_value=MagicMock(
user=MagicMock(
starred=AsyncMock(
return_value=MagicMock(
is_last_page=False,
next_page_number=2,
last_page_number=2,
data=[MagicMock(full_name="home-assistant/core")],
)
),
repos=AsyncMock(
return_value=MagicMock(
is_last_page=False,
next_page_number=2,
last_page_number=2,
data=[MagicMock(full_name="awesome/reposiotry")],
)
),
)
),
):
repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
async def test_no_repositories(
hass: HomeAssistant,
mock_setup_entry: None,
github_device_client: AsyncMock,
github_client: AsyncMock,
device_activation_event: asyncio.Event,
) -> None:
"""Test the full manual user flow from start to finish."""
assert len(repos) == 2
assert repos[-1] == DEFAULT_REPOSITORIES[0]
github_client.user.repos.side_effect = [MagicMock(is_last_page=True, data=[])]
github_client.user.starred.side_effect = [MagicMock(is_last_page=True, data=[])]
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["step_id"] == "device"
assert result["type"] is FlowResultType.SHOW_PROGRESS
device_activation_event.set()
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["step_id"] == "repositories"
assert result["type"] is FlowResultType.FORM
assert not result["errors"]
schema = result["data_schema"]
repositories = schema.schema[CONF_REPOSITORIES].options
assert len(repositories) == 2
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_REPOSITORIES: DEFAULT_REPOSITORIES}
)
assert result["type"] is FlowResultType.CREATE_ENTRY
async def test_starred_pagination_with_no_starred(hass: HomeAssistant) -> None:
"""Test pagination of starred repositories with no starred."""
with patch(
"homeassistant.components.github.config_flow.GitHubAPI",
return_value=MagicMock(
user=MagicMock(
starred=AsyncMock(
return_value=MagicMock(
is_last_page=True,
data=[],
)
),
repos=AsyncMock(
return_value=MagicMock(
is_last_page=True,
data=[],
)
),
)
),
):
repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
async def test_exception_during_repository_fetch(
hass: HomeAssistant,
mock_setup_entry: None,
github_device_client: AsyncMock,
github_client: AsyncMock,
device_activation_event: asyncio.Event,
) -> None:
"""Test the full manual user flow from start to finish."""
assert len(repos) == 2
assert repos == DEFAULT_REPOSITORIES
github_client.user.repos.side_effect = GitHubException()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
async def test_starred_pagination_with_exception(hass: HomeAssistant) -> None:
"""Test pagination of starred repositories with exception."""
with patch(
"homeassistant.components.github.config_flow.GitHubAPI",
return_value=MagicMock(
user=MagicMock(starred=AsyncMock(side_effect=GitHubException("Error")))
),
):
repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
assert result["step_id"] == "device"
assert result["type"] is FlowResultType.SHOW_PROGRESS
assert len(repos) == 2
assert repos == DEFAULT_REPOSITORIES
device_activation_event.set()
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["step_id"] == "repositories"
assert result["type"] is FlowResultType.FORM
assert not result["errors"]
schema = result["data_schema"]
repositories = schema.schema[CONF_REPOSITORIES].options
assert len(repositories) == 2
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_REPOSITORIES: DEFAULT_REPOSITORIES}
)
assert result["type"] is FlowResultType.CREATE_ENTRY
async def test_options_flow(

View File

@@ -1,89 +1,56 @@
"""Test GitHub diagnostics."""
import json
from unittest.mock import AsyncMock
from aiogithubapi import GitHubException
import pytest
from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN
from homeassistant.core import HomeAssistant
from .common import setup_github_integration
from . import setup_integration
from tests.common import MockConfigEntry, async_load_fixture
from tests.common import MockConfigEntry
from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_entry_diagnostics(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
mock_config_entry: MockConfigEntry,
aioclient_mock: AiohttpClientMocker,
github_client: AsyncMock,
) -> None:
"""Test config entry diagnostics."""
mock_config_entry.add_to_hass(hass)
hass.config_entries.async_update_entry(
mock_config_entry,
options={CONF_REPOSITORIES: ["home-assistant/core"]},
)
response_json = json.loads(await async_load_fixture(hass, "graphql.json", DOMAIN))
response_json["data"]["repository"]["full_name"] = "home-assistant/core"
aioclient_mock.post(
"https://api.github.com/graphql",
json=response_json,
headers=json.loads(await async_load_fixture(hass, "base_headers.json", DOMAIN)),
)
aioclient_mock.get(
"https://api.github.com/rate_limit",
json={"resources": {"core": {"remaining": 100, "limit": 100}}},
headers={"Content-Type": "application/json"},
)
await setup_github_integration(
hass, mock_config_entry, aioclient_mock, add_entry_to_hass=False
)
await setup_integration(hass, mock_config_entry)
result = await get_diagnostics_for_config_entry(
hass,
hass_client,
mock_config_entry,
)
assert result["options"]["repositories"] == ["home-assistant/core"]
assert result["options"]["repositories"] == ["octocat/Hello-World"]
assert result["rate_limit"] == {
"resources": {"core": {"remaining": 100, "limit": 100}}
}
assert (
result["repositories"]["home-assistant/core"]["full_name"]
== "home-assistant/core"
result["repositories"]["octocat/Hello-World"]["full_name"]
== "octocat/Hello-World"
)
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_entry_diagnostics_exception(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
init_integration: MockConfigEntry,
aioclient_mock: AiohttpClientMocker,
mock_config_entry: MockConfigEntry,
github_client: AsyncMock,
) -> None:
"""Test config entry diagnostics with exception for ratelimit."""
aioclient_mock.get(
"https://api.github.com/rate_limit",
exc=GitHubException("error"),
)
await setup_integration(hass, mock_config_entry)
github_client.rate_limit.side_effect = GitHubException("error")
result = await get_diagnostics_for_config_entry(
hass,
hass_client,
init_integration,
mock_config_entry,
)
assert (
result["rate_limit"]["error"]
== "Unexpected exception for 'https://api.github.com/rate_limit' with - error"
)
assert result["rate_limit"]["error"] == "error"

View File

@@ -1,24 +1,23 @@
"""Test the GitHub init file."""
from unittest.mock import AsyncMock
import pytest
from homeassistant.components.github import CONF_REPOSITORIES
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er, icon
from .common import setup_github_integration
from . import setup_integration
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_device_registry_cleanup(
hass: HomeAssistant,
device_registry: dr.DeviceRegistry,
mock_config_entry: MockConfigEntry,
aioclient_mock: AiohttpClientMocker,
github_client: AsyncMock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test that we remove untracked repositories from the device registry."""
@@ -27,9 +26,7 @@ async def test_device_registry_cleanup(
mock_config_entry,
options={CONF_REPOSITORIES: ["home-assistant/core"]},
)
await setup_github_integration(
hass, mock_config_entry, aioclient_mock, add_entry_to_hass=False
)
await setup_integration(hass, mock_config_entry)
devices = dr.async_entries_for_config_entry(
registry=device_registry,
@@ -58,12 +55,10 @@ async def test_device_registry_cleanup(
assert len(devices) == 0
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_subscription_setup(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
aioclient_mock: AiohttpClientMocker,
github_client: AsyncMock,
) -> None:
"""Test that we setup event subscription."""
mock_config_entry.add_to_hass(hass)
@@ -72,21 +67,14 @@ async def test_subscription_setup(
options={CONF_REPOSITORIES: ["home-assistant/core"]},
pref_disable_polling=False,
)
await setup_github_integration(
hass, mock_config_entry, aioclient_mock, add_entry_to_hass=False
)
assert (
"https://api.github.com/repos/home-assistant/core/events" in x[1]
for x in aioclient_mock.mock_calls
)
await setup_integration(hass, mock_config_entry)
github_client.repos.events.subscribe.assert_called_once()
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_subscription_setup_polling_disabled(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
aioclient_mock: AiohttpClientMocker,
github_client: AsyncMock,
) -> None:
"""Test that we do not setup event subscription if polling is disabled."""
mock_config_entry.add_to_hass(hass)
@@ -95,13 +83,8 @@ async def test_subscription_setup_polling_disabled(
options={CONF_REPOSITORIES: ["home-assistant/core"]},
pref_disable_polling=True,
)
await setup_github_integration(
hass, mock_config_entry, aioclient_mock, add_entry_to_hass=False
)
assert (
"https://api.github.com/repos/home-assistant/core/events" not in x[1]
for x in aioclient_mock.mock_calls
)
await setup_integration(hass, mock_config_entry)
github_client.repos.events.subscribe.assert_not_called()
# Prove that we subscribed if the user enabled polling again
hass.config_entries.async_update_entry(
@@ -109,23 +92,20 @@ async def test_subscription_setup_polling_disabled(
)
assert await hass.config_entries.async_reload(mock_config_entry.entry_id)
await hass.async_block_till_done()
assert (
"https://api.github.com/repos/home-assistant/core/events" in x[1]
for x in aioclient_mock.mock_calls
)
github_client.repos.events.subscribe.assert_called_once()
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_sensor_icons(
hass: HomeAssistant,
init_integration: MockConfigEntry,
github_client: AsyncMock,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test to ensure that all sensor entities have an icon definition."""
await setup_integration(hass, mock_config_entry)
entities = er.async_entries_for_config_entry(
entity_registry,
config_entry_id=init_integration.entry_id,
config_entry_id=mock_config_entry.entry_id,
)
icons = await icon.async_get_icons(hass, "entity", integrations=["github"])

View File

@@ -1,50 +1,36 @@
"""Test GitHub sensor."""
import json
from unittest.mock import AsyncMock
import pytest
from freezegun.api import FrozenDateTimeFactory
from homeassistant.components.github.const import DOMAIN, FALLBACK_UPDATE_INTERVAL
from homeassistant.components.github.const import FALLBACK_UPDATE_INTERVAL
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util
from .common import TEST_REPOSITORY
from . import setup_integration
from tests.common import MockConfigEntry, async_fire_time_changed, async_load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.common import MockConfigEntry, async_fire_time_changed
TEST_SENSOR_ENTITY = "sensor.octocat_hello_world_latest_release"
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_sensor_updates_with_empty_release_array(
hass: HomeAssistant,
init_integration: MockConfigEntry,
aioclient_mock: AiohttpClientMocker,
github_client: AsyncMock,
mock_config_entry: MockConfigEntry,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test the sensor updates by default GitHub sensors."""
await setup_integration(hass, mock_config_entry)
state = hass.states.get(TEST_SENSOR_ENTITY)
assert state.state == "v1.0.0"
response_json = json.loads(await async_load_fixture(hass, "graphql.json", DOMAIN))
response_json["data"]["repository"]["release"] = None
headers = json.loads(await async_load_fixture(hass, "base_headers.json", DOMAIN))
github_client.graphql.return_value.data["data"]["repository"]["release"] = None
aioclient_mock.clear_requests()
aioclient_mock.get(
f"https://api.github.com/repos/{TEST_REPOSITORY}/events",
json=[],
headers=headers,
)
aioclient_mock.post(
"https://api.github.com/graphql",
json=response_json,
headers=headers,
)
async_fire_time_changed(hass, dt_util.utcnow() + FALLBACK_UPDATE_INTERVAL)
freezer.tick(FALLBACK_UPDATE_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()
new_state = hass.states.get(TEST_SENSOR_ENTITY)
assert new_state.state == "unavailable"
assert new_state.state == STATE_UNAVAILABLE

View File

@@ -484,14 +484,14 @@
'object_id_base': 'Ozone',
'options': dict({
}),
'original_device_class': None,
'original_device_class': <SensorDeviceClass.OZONE: 'ozone'>,
'original_icon': None,
'original_name': 'Ozone',
'platform': 'google_air_quality',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'ozone',
'translation_key': None,
'unique_id': 'o3_10.1_20.1',
'unit_of_measurement': 'ppb',
})
@@ -500,6 +500,7 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'attribution': 'Data provided by Google Air Quality',
'device_class': 'ozone',
'friendly_name': 'Home Ozone',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'ppb',

View File

@@ -27,7 +27,11 @@ async def target_lights(hass: HomeAssistant) -> list[str]:
@pytest.fixture
async def target_switches(hass: HomeAssistant) -> list[str]:
"""Create multiple switch entities associated with different targets."""
"""Create multiple switch entities associated with different targets.
Note: The switches are used to ensure that only light entities are considered
in the condition evaluation and not other toggle entities.
"""
return (await target_entities(hass, "switch"))["included"]

View File

@@ -59,7 +59,7 @@ def mock_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry:
return MockConfigEntry(
domain=DOMAIN,
title=TITLE,
unique_id=54321,
unique_id="54321",
data={
"auth_implementation": DOMAIN,
"token": {

View File

@@ -74,7 +74,7 @@ async def test_full_flow(
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "test@microbees.com"
assert "result" in result
assert result["result"].unique_id == 54321
assert result["result"].unique_id == "54321"
assert "token" in result["result"].data
assert result["result"].data["token"]["access_token"] == "mock-access-token"
assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token"
@@ -197,7 +197,7 @@ async def test_config_reauth_wrong_account(
) -> None:
"""Test reauth with wrong account."""
await setup_integration(hass, config_entry)
microbees.return_value.getMyProfile.return_value.id = 12345
microbees.return_value.getMyProfile.return_value.id = "12345"
result = await config_entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"

View File

@@ -0,0 +1,35 @@
"""Tests for the microBees component."""
from unittest.mock import patch
from homeassistant.components.microbees.const import DOMAIN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
"""Test migrating a 1.1 config entry to 1.2."""
with patch(
"homeassistant.components.microbees.async_setup_entry", return_value=True
):
entry = MockConfigEntry(
domain=DOMAIN,
data={
"auth_implementation": DOMAIN,
"token": {
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
},
},
version=1,
minor_version=1,
unique_id=54321,
)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
assert entry.version == 1
assert entry.minor_version == 2
assert entry.unique_id == "54321"

View File

@@ -244,7 +244,7 @@ async def test_config_reauth_wrong_account(
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
"user_id": 12346,
"user_id": "12346",
},
)

View File

@@ -1,7 +1,7 @@
"""Tests for component initialisation."""
from datetime import timedelta
from unittest.mock import AsyncMock
from unittest.mock import AsyncMock, patch
from freezegun.api import FrozenDateTimeFactory
from monzopy import AuthorisationExpiredError
@@ -35,3 +35,29 @@ async def test_api_can_trigger_reauth(
assert flow["step_id"] == "reauth_confirm"
assert flow["handler"] == DOMAIN
assert flow["context"]["source"] == SOURCE_REAUTH
async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
"""Test migrating a 1.1 config entry to 1.2."""
with patch("homeassistant.components.monzo.async_setup_entry", return_value=True):
entry = MockConfigEntry(
domain=DOMAIN,
data={
"auth_implementation": DOMAIN,
"token": {
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
"user_id": "600",
},
},
version=1,
minor_version=1,
unique_id=600,
)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
assert entry.version == 1
assert entry.minor_version == 2
assert entry.unique_id == "600"

View File

@@ -57,6 +57,7 @@ def mock_aiontfy() -> Generator[AsyncMock]:
actions=[],
attachment=None,
content_type=None,
sequence_id="Mc3otamDNcpJ",
)
resp.to_dict.return_value = {
@@ -74,6 +75,7 @@ def mock_aiontfy() -> Generator[AsyncMock]:
"actions": [],
"attachment": None,
"content_type": None,
"sequence_id": "Mc3otamDNcpJ",
}
async def mock_ws(

View File

@@ -59,6 +59,7 @@
'id': 'h6Y2hKA5sy0U',
'message': 'Hello',
'priority': 3,
'sequence_id': 'Mc3otamDNcpJ',
'tags': list([
'octopus',
]),

View File

@@ -101,6 +101,7 @@ async def test_event(
"time": datetime(2025, 3, 28, 17, 58, 46, tzinfo=UTC),
"title": "Title",
"topic": "mytopic",
"sequence_id": "Mc3otamDNcpJ",
}

View File

@@ -22,6 +22,7 @@ from homeassistant.components.ntfy.notify import (
ATTR_ICON,
ATTR_MARKDOWN,
ATTR_PRIORITY,
ATTR_SEQUENCE_ID,
ATTR_TAGS,
SERVICE_PUBLISH,
)
@@ -60,6 +61,7 @@ async def test_ntfy_publish(
ATTR_MARKDOWN: True,
ATTR_PRIORITY: "5",
ATTR_TAGS: ["partying_face", "grin"],
ATTR_SEQUENCE_ID: "Mc3otamDNcpJ",
},
blocking=True,
)
@@ -76,6 +78,7 @@ async def test_ntfy_publish(
markdown=True,
icon=URL("https://example.org/logo.png"),
delay="86430.0s",
sequence_id="Mc3otamDNcpJ",
)
)

View File

@@ -3107,7 +3107,6 @@ def test_device_class_converters_are_complete() -> None:
SensorDeviceClass.IRRADIANCE,
SensorDeviceClass.MOISTURE,
SensorDeviceClass.MONETARY,
SensorDeviceClass.NITROGEN_MONOXIDE,
SensorDeviceClass.NITROUS_OXIDE,
SensorDeviceClass.PH,
SensorDeviceClass.PM1,

View File

@@ -27,7 +27,11 @@ async def target_sirens(hass: HomeAssistant) -> list[str]:
@pytest.fixture
async def target_switches(hass: HomeAssistant) -> list[str]:
"""Create multiple switch entities associated with different targets."""
"""Create multiple switch entities associated with different targets.
Note: The switches are used to ensure that only siren entities are considered
in the condition evaluation and not other toggle entities.
"""
return (await target_entities(hass, "switch"))["included"]

View File

@@ -52,10 +52,13 @@ def make_test_trigger(*entities: str) -> dict:
async def async_trigger(
hass: HomeAssistant, entity_id: str, state: str | None = None
hass: HomeAssistant,
entity_id: str,
state: str | None = None,
attributes: dict | None = None,
) -> None:
"""Trigger a state change."""
hass.states.async_set(entity_id, state)
hass.states.async_set(entity_id, state, attributes)
await hass.async_block_till_done()
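A short usage sketch of the extended helper from a test body (the entity id and attribute are illustrative): the optional mapping lets trigger-based template entities be exercised against attribute changes in a single call.

# State-only change, as before.
await async_trigger(hass, "sensor.test_state", "42")
# State change that also carries attributes.
await async_trigger(hass, "sensor.test_state", "42", {"step": 5})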

View File

@@ -236,8 +236,8 @@ BINARY_SENSOR_OPTIONS = {
"on",
{"one": "on", "two": "off"},
{},
{"options": "{{ ['off', 'on', 'auto'] }}"},
{"options": "{{ ['off', 'on', 'auto'] }}"},
{"options": "{{ ['off', 'on', 'auto'] }}", "select_option": []},
{"options": "{{ ['off', 'on', 'auto'] }}", "select_option": []},
{},
),
(
@@ -458,8 +458,8 @@ async def test_config_flow(
(
"select",
{"state": "{{ states('select.one') }}"},
{"options": "{{ ['off', 'on', 'auto'] }}"},
{"options": "{{ ['off', 'on', 'auto'] }}"},
{"options": "{{ ['off', 'on', 'auto'] }}", "select_option": []},
{"options": "{{ ['off', 'on', 'auto'] }}", "select_option": []},
),
(
"update",
@@ -734,8 +734,8 @@ async def test_config_flow_device(
{"state": "{{ states('select.two') }}"},
["on", "off"],
{"one": "on", "two": "off"},
{"options": "{{ ['off', 'on', 'auto'] }}"},
{"options": "{{ ['off', 'on', 'auto'] }}"},
{"options": "{{ ['off', 'on', 'auto'] }}", "select_option": []},
{"options": "{{ ['off', 'on', 'auto'] }}", "select_option": []},
"state",
),
(
@@ -1606,8 +1606,8 @@ async def test_option_flow_sensor_preview_config_entry_removed(
(
"select",
{"state": "{{ states('select.one') }}"},
{"options": "{{ ['off', 'on', 'auto'] }}"},
{"options": "{{ ['off', 'on', 'auto'] }}"},
{"options": "{{ ['off', 'on', 'auto'] }}", "select_option": []},
{"options": "{{ ['off', 'on', 'auto'] }}", "select_option": []},
),
(
"switch",

View File

@@ -405,10 +405,12 @@ async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None:
"name": "My template",
"state": "{{ 'on' }}",
"options": "{{ ['off', 'on', 'auto'] }}",
"select_option": [],
},
{
"state": "{{ 'on' }}",
"options": "{{ ['off', 'on', 'auto'] }}",
"select_option": [],
},
),
(

View File

@@ -5,13 +5,7 @@ from typing import Any
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant import setup
from homeassistant.components import number, template
from homeassistant.components.input_number import (
ATTR_VALUE as INPUT_NUMBER_ATTR_VALUE,
DOMAIN as INPUT_NUMBER_DOMAIN,
SERVICE_SET_VALUE as INPUT_NUMBER_SERVICE_SET_VALUE,
)
from homeassistant.components import number
from homeassistant.components.number import (
ATTR_MAX,
ATTR_MIN,
@@ -32,104 +26,60 @@ from homeassistant.const import (
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import Context, HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.setup import async_setup_component
from .conftest import ConfigurationStyle, async_get_flow_preview_state
from .conftest import (
ConfigurationStyle,
TemplatePlatformSetup,
async_get_flow_preview_state,
async_trigger,
make_test_trigger,
setup_and_test_nested_unique_id,
setup_and_test_unique_id,
setup_entity,
)
from tests.common import MockConfigEntry, assert_setup_component, async_capture_events
from tests.common import MockConfigEntry
from tests.typing import WebSocketGenerator
_TEST_OBJECT_ID = "template_number"
_TEST_NUMBER = f"number.{_TEST_OBJECT_ID}"
# Represent for number's value
_VALUE_INPUT_NUMBER = "input_number.value"
# Represent for number's minimum
_MINIMUM_INPUT_NUMBER = "input_number.minimum"
# Represent for number's maximum
_MAXIMUM_INPUT_NUMBER = "input_number.maximum"
# Represent for number's step
_STEP_INPUT_NUMBER = "input_number.step"
# Config for `_VALUE_INPUT_NUMBER`
_VALUE_INPUT_NUMBER_CONFIG = {
"value": {
"min": 0.0,
"max": 100.0,
"name": "Value",
"step": 1.0,
"mode": "slider",
}
}
TEST_STATE_ENTITY_ID = "number.test_state"
TEST_AVAILABILITY_ENTITY_ID = "binary_sensor.test_availability"
TEST_STATE_TRIGGER = {
"trigger": {
"trigger": "state",
"entity_id": [TEST_STATE_ENTITY_ID, TEST_AVAILABILITY_ENTITY_ID],
TEST_MAXIMUM_ENTITY_ID = "sensor.maximum"
TEST_MINIMUM_ENTITY_ID = "sensor.minimum"
TEST_STATE_ENTITY_ID = "number.test_state"
TEST_STEP_ENTITY_ID = "sensor.step"
TEST_NUMBER = TemplatePlatformSetup(
number.DOMAIN,
None,
"template_number",
make_test_trigger(
TEST_AVAILABILITY_ENTITY_ID,
TEST_MAXIMUM_ENTITY_ID,
TEST_MINIMUM_ENTITY_ID,
TEST_STATE_ENTITY_ID,
TEST_STEP_ENTITY_ID,
),
)
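make_test_trigger itself lives in the shared template conftest and is not shown in this diff; judging from the TEST_STATE_TRIGGER constants it replaces, it plausibly builds a trigger section along these lines (a hedged reconstruction, not the actual helper):
def make_test_trigger(*entities: str) -> dict:
    # Hedged reconstruction based on the removed TEST_STATE_TRIGGER blocks;
    # the real helper is defined in the template tests' conftest.py.
    return {
        "trigger": {"trigger": "state", "entity_id": list(entities)},
        "variables": {"triggering_entity": "{{ trigger.entity_id }}"},
        "action": [
            {"event": "action_event", "event_data": {"what": "{{ triggering_entity }}"}}
        ],
    }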
TEST_SET_VALUE_ACTION = {
"action": "test.automation",
"data": {
"action": "set_value",
"caller": "{{ this.entity_id }}",
"value": "{{ value }}",
},
"variables": {"triggering_entity": "{{ trigger.entity_id }}"},
"action": [
{"event": "action_event", "event_data": {"what": "{{ triggering_entity }}"}}
],
}
TEST_REQUIRED = {"state": "0", "step": "1", "set_value": []}
async def async_setup_modern_format(
hass: HomeAssistant, count: int, number_config: dict[str, Any]
) -> None:
"""Do setup of number integration via new format."""
config = {"template": {"number": number_config}}
with assert_setup_component(count, template.DOMAIN):
assert await async_setup_component(
hass,
template.DOMAIN,
config,
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
async def async_setup_trigger_format(
hass: HomeAssistant, count: int, number_config: dict[str, Any]
) -> None:
"""Do setup of number integration via trigger format."""
config = {"template": {**TEST_STATE_TRIGGER, "number": number_config}}
with assert_setup_component(count, template.DOMAIN):
assert await async_setup_component(
hass,
template.DOMAIN,
config,
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
@pytest.fixture
async def setup_number(
hass: HomeAssistant,
count: int,
style: ConfigurationStyle,
number_config: dict[str, Any],
config: dict[str, Any],
) -> None:
"""Do setup of number integration."""
if style == ConfigurationStyle.MODERN:
await async_setup_modern_format(
hass, count, {"name": _TEST_OBJECT_ID, **number_config}
)
if style == ConfigurationStyle.TRIGGER:
await async_setup_trigger_format(
hass, count, {"name": _TEST_OBJECT_ID, **number_config}
)
await setup_entity(hass, TEST_NUMBER, style, count, config)
async def test_setup_config_entry(
@@ -166,294 +116,135 @@ async def test_setup_config_entry(
assert state == snapshot
async def test_missing_optional_config(hass: HomeAssistant) -> None:
"""Test: missing optional template is ok."""
with assert_setup_component(1, "template"):
assert await setup.async_setup_component(
hass,
"template",
@pytest.mark.parametrize(
("count", "config"),
[
(
1,
{
"template": {
"number": {
"state": "{{ 4 }}",
"set_value": {"service": "script.set_value"},
"step": "{{ 1 }}",
}
}
"state": "{{ 4 }}",
"set_value": {"service": "script.set_value"},
"step": "{{ 1 }}",
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
],
)
@pytest.mark.parametrize(
"style",
[ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER],
)
@pytest.mark.usefixtures("setup_number")
async def test_missing_optional_config(hass: HomeAssistant) -> None:
"""Test: missing optional template is ok."""
await async_trigger(hass, TEST_STATE_ENTITY_ID, "anything")
_verify(hass, 4, 1, 0.0, 100.0, None)
async def test_missing_required_keys(hass: HomeAssistant) -> None:
"""Test: missing required fields will fail."""
with assert_setup_component(0, "template"):
assert await setup.async_setup_component(
hass,
"template",
@pytest.mark.parametrize(
("count", "config"),
[
(
0,
{
"template": {
"number": {
"state": "{{ 4 }}",
}
}
"state": "{{ 4 }}",
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
],
)
@pytest.mark.parametrize(
"style",
[ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER],
)
@pytest.mark.usefixtures("setup_number")
async def test_missing_required_keys(hass: HomeAssistant) -> None:
"""Test: missing required fields will fail."""
assert hass.states.async_all("number") == []
async def test_all_optional_config(hass: HomeAssistant) -> None:
"""Test: including all optional templates is ok."""
with assert_setup_component(1, "template"):
assert await setup.async_setup_component(
hass,
"template",
@pytest.mark.parametrize(
("count", "config"),
[
(
1,
{
"template": {
"number": {
"state": "{{ 4 }}",
"set_value": {"service": "script.set_value"},
"min": "{{ 3 }}",
"max": "{{ 5 }}",
"step": "{{ 1 }}",
"unit_of_measurement": "beer",
}
}
"state": "{{ 4 }}",
"set_value": {"service": "script.set_value"},
"min": "{{ 3 }}",
"max": "{{ 5 }}",
"step": "{{ 1 }}",
"unit_of_measurement": "beer",
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
],
)
@pytest.mark.parametrize(
"style",
[ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER],
)
@pytest.mark.usefixtures("setup_number")
async def test_all_optional_config(hass: HomeAssistant) -> None:
"""Test: including all optional templates is ok."""
await async_trigger(hass, TEST_STATE_ENTITY_ID, "anything")
_verify(hass, 4, 1, 3, 5, "beer")
async def test_templates_with_entities(
@pytest.mark.parametrize(
("count", "config"),
[
(
1,
{
"state": f"{{{{ states('{TEST_STATE_ENTITY_ID}') | float(1.0) }}}}",
"step": f"{{{{ states('{TEST_STEP_ENTITY_ID}') | float(5.0) }}}}",
"min": f"{{{{ states('{TEST_MINIMUM_ENTITY_ID}') | float(0.0) }}}}",
"max": f"{{{{ states('{TEST_MAXIMUM_ENTITY_ID}') | float(100.0) }}}}",
"set_value": [TEST_SET_VALUE_ACTION],
},
)
],
)
@pytest.mark.parametrize(
"style",
[ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER],
)
@pytest.mark.usefixtures("setup_number")
async def test_template_number(
hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall]
) -> None:
"""Test templates with values from other entities."""
with assert_setup_component(4, "input_number"):
assert await setup.async_setup_component(
hass,
"input_number",
{
"input_number": {
**_VALUE_INPUT_NUMBER_CONFIG,
"step": {
"min": 0.0,
"max": 100.0,
"name": "Step",
"step": 1.0,
"mode": "slider",
},
"minimum": {
"min": 0.0,
"max": 100.0,
"name": "Minimum",
"step": 1.0,
"mode": "slider",
},
"maximum": {
"min": 0.0,
"max": 100.0,
"name": "Maximum",
"step": 1.0,
"mode": "slider",
},
}
},
)
with assert_setup_component(1, "template"):
assert await setup.async_setup_component(
hass,
"template",
{
"template": {
"unique_id": "b",
"number": {
"state": f"{{{{ states('{_VALUE_INPUT_NUMBER}') }}}}",
"step": f"{{{{ states('{_STEP_INPUT_NUMBER}') }}}}",
"min": f"{{{{ states('{_MINIMUM_INPUT_NUMBER}') }}}}",
"max": f"{{{{ states('{_MAXIMUM_INPUT_NUMBER}') }}}}",
"set_value": [
{
"service": "input_number.set_value",
"data_template": {
"entity_id": _VALUE_INPUT_NUMBER,
"value": "{{ value }}",
},
},
{
"service": "test.automation",
"data_template": {
"action": "set_value",
"caller": "{{ this.entity_id }}",
"value": "{{ value }}",
},
},
],
"optimistic": True,
"unique_id": "a",
},
}
},
)
hass.states.async_set(_VALUE_INPUT_NUMBER, 4)
hass.states.async_set(_STEP_INPUT_NUMBER, 1)
hass.states.async_set(_MINIMUM_INPUT_NUMBER, 3)
hass.states.async_set(_MAXIMUM_INPUT_NUMBER, 5)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
entry = entity_registry.async_get(_TEST_NUMBER)
assert entry
assert entry.unique_id == "b-a"
await async_trigger(hass, TEST_STATE_ENTITY_ID, 4)
await async_trigger(hass, TEST_STEP_ENTITY_ID, 1)
await async_trigger(hass, TEST_MINIMUM_ENTITY_ID, 3)
await async_trigger(hass, TEST_MAXIMUM_ENTITY_ID, 5)
_verify(hass, 4, 1, 3, 5, None)
await hass.services.async_call(
INPUT_NUMBER_DOMAIN,
INPUT_NUMBER_SERVICE_SET_VALUE,
{CONF_ENTITY_ID: _VALUE_INPUT_NUMBER, INPUT_NUMBER_ATTR_VALUE: 5},
blocking=True,
)
await hass.async_block_till_done()
await async_trigger(hass, TEST_STATE_ENTITY_ID, 5)
_verify(hass, 5, 1, 3, 5, None)
await hass.services.async_call(
INPUT_NUMBER_DOMAIN,
INPUT_NUMBER_SERVICE_SET_VALUE,
{CONF_ENTITY_ID: _STEP_INPUT_NUMBER, INPUT_NUMBER_ATTR_VALUE: 2},
blocking=True,
)
await hass.async_block_till_done()
await async_trigger(hass, TEST_STEP_ENTITY_ID, 2)
_verify(hass, 5, 2, 3, 5, None)
await hass.services.async_call(
INPUT_NUMBER_DOMAIN,
INPUT_NUMBER_SERVICE_SET_VALUE,
{CONF_ENTITY_ID: _MINIMUM_INPUT_NUMBER, INPUT_NUMBER_ATTR_VALUE: 2},
blocking=True,
)
await hass.async_block_till_done()
await async_trigger(hass, TEST_MINIMUM_ENTITY_ID, 2)
_verify(hass, 5, 2, 2, 5, None)
await hass.services.async_call(
INPUT_NUMBER_DOMAIN,
INPUT_NUMBER_SERVICE_SET_VALUE,
{CONF_ENTITY_ID: _MAXIMUM_INPUT_NUMBER, INPUT_NUMBER_ATTR_VALUE: 6},
blocking=True,
)
await hass.async_block_till_done()
await async_trigger(hass, TEST_MAXIMUM_ENTITY_ID, 6)
_verify(hass, 5, 2, 2, 6, None)
await hass.services.async_call(
NUMBER_DOMAIN,
NUMBER_SERVICE_SET_VALUE,
{CONF_ENTITY_ID: _TEST_NUMBER, NUMBER_ATTR_VALUE: 2},
{CONF_ENTITY_ID: TEST_NUMBER.entity_id, NUMBER_ATTR_VALUE: 2},
blocking=True,
)
_verify(hass, 2, 2, 2, 6, None)
# Check this variable can be used in set_value script
assert len(calls) == 1
assert calls[-1].data["action"] == "set_value"
assert calls[-1].data["caller"] == _TEST_NUMBER
assert calls[-1].data["caller"] == TEST_NUMBER.entity_id
assert calls[-1].data["value"] == 2
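The `calls` list asserted on above comes from a shared fixture that is not part of this diff; a minimal sketch of an equivalent fixture, assuming the stock `async_mock_service` test helper:
import pytest
from homeassistant.core import HomeAssistant, ServiceCall
from tests.common import async_mock_service


@pytest.fixture
def calls(hass: HomeAssistant) -> list[ServiceCall]:
    """Capture calls made to the stub test.automation service."""
    # TEST_SET_VALUE_ACTION targets test.automation, so each set_value run
    # appends one ServiceCall carrying action/caller/value to this list.
    return async_mock_service(hass, "test", "automation")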
async def test_trigger_number(hass: HomeAssistant) -> None:
"""Test trigger based template number."""
events = async_capture_events(hass, "test_number_event")
assert await setup.async_setup_component(
hass,
"template",
{
"template": [
{"invalid": "config"},
# Config after invalid should still be set up
{
"unique_id": "listening-test-event",
"trigger": {"platform": "event", "event_type": "test_event"},
"number": [
{
"name": "Hello Name",
"unique_id": "hello_name-id",
"state": "{{ trigger.event.data.beers_drank }}",
"min": "{{ trigger.event.data.min_beers }}",
"max": "{{ trigger.event.data.max_beers }}",
"step": "{{ trigger.event.data.step }}",
"unit_of_measurement": "beer",
"set_value": {
"event": "test_number_event",
"event_data": {"entity_id": "{{ this.entity_id }}"},
},
"optimistic": True,
},
],
},
],
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
state = hass.states.get("number.hello_name")
assert state is not None
assert state.state == STATE_UNKNOWN
assert state.attributes["min"] == 0.0
assert state.attributes["max"] == 100.0
assert state.attributes["step"] == 1.0
assert state.attributes["unit_of_measurement"] == "beer"
context = Context()
hass.bus.async_fire(
"test_event",
{
"beers_drank": 3,
"min_beers": 1.0,
"max_beers": 5.0,
"step": 0.5,
},
context=context,
)
await hass.async_block_till_done()
state = hass.states.get("number.hello_name")
assert state is not None
assert state.state == "3.0"
assert state.attributes["min"] == 1.0
assert state.attributes["max"] == 5.0
assert state.attributes["step"] == 0.5
await hass.services.async_call(
NUMBER_DOMAIN,
NUMBER_SERVICE_SET_VALUE,
{CONF_ENTITY_ID: "number.hello_name", NUMBER_ATTR_VALUE: 2},
blocking=True,
)
assert len(events) == 1
assert events[0].event_type == "test_number_event"
entity_id = events[0].data.get("entity_id")
assert entity_id is not None
assert entity_id == "number.hello_name"
await async_trigger(hass, TEST_STATE_ENTITY_ID, 2)
_verify(hass, 2, 2, 2, 6, None)
def _verify(
@@ -465,7 +256,7 @@ def _verify(
expected_unit_of_measurement: str | None,
) -> None:
"""Verify number's state."""
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
attributes = state.attributes
assert state.state == str(float(expected_value))
assert attributes.get(ATTR_STEP) == float(expected_step)
@@ -480,7 +271,7 @@ def _verify(
[(ConfigurationStyle.MODERN, ""), (ConfigurationStyle.TRIGGER, None)],
)
@pytest.mark.parametrize(
("number_config", "attribute", "expected"),
("config", "attribute", "expected"),
[
(
{
@@ -508,13 +299,12 @@ async def test_templated_optional_config(
initial_expected_state: str | None,
) -> None:
"""Test optional config templates."""
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert state.attributes.get(attribute) == initial_expected_state
state = hass.states.async_set(TEST_STATE_ENTITY_ID, "1")
await hass.async_block_till_done()
await async_trigger(hass, TEST_STATE_ENTITY_ID, "1")
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert state.attributes[attribute] == expected
@@ -567,7 +357,7 @@ async def test_device_id(
@pytest.mark.parametrize(
("count", "number_config"),
("count", "config"),
[
(
1,
@@ -587,26 +377,26 @@ async def test_optimistic(hass: HomeAssistant) -> None:
await hass.services.async_call(
number.DOMAIN,
number.SERVICE_SET_VALUE,
{ATTR_ENTITY_ID: _TEST_NUMBER, "value": 4},
{ATTR_ENTITY_ID: TEST_NUMBER.entity_id, "value": 4},
blocking=True,
)
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert float(state.state) == 4
await hass.services.async_call(
number.DOMAIN,
number.SERVICE_SET_VALUE,
{ATTR_ENTITY_ID: _TEST_NUMBER, "value": 2},
{ATTR_ENTITY_ID: TEST_NUMBER.entity_id, "value": 2},
blocking=True,
)
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert float(state.state) == 2
@pytest.mark.parametrize(
("count", "number_config"),
("count", "config"),
[
(
1,
@@ -628,16 +418,16 @@ async def test_not_optimistic(hass: HomeAssistant) -> None:
await hass.services.async_call(
number.DOMAIN,
number.SERVICE_SET_VALUE,
{ATTR_ENTITY_ID: _TEST_NUMBER, "value": 4},
{ATTR_ENTITY_ID: TEST_NUMBER.entity_id, "value": 4},
blocking=True,
)
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert state.state == STATE_UNKNOWN
@pytest.mark.parametrize(
("count", "number_config"),
("count", "config"),
[
(
1,
@@ -656,34 +446,29 @@ async def test_not_optimistic(hass: HomeAssistant) -> None:
async def test_availability(hass: HomeAssistant) -> None:
"""Test configuration with optimistic state."""
hass.states.async_set(TEST_AVAILABILITY_ENTITY_ID, "on")
hass.states.async_set(TEST_STATE_ENTITY_ID, "4.0")
await hass.async_block_till_done()
state = hass.states.get(_TEST_NUMBER)
await async_trigger(hass, TEST_STATE_ENTITY_ID, "4.0")
await async_trigger(hass, TEST_AVAILABILITY_ENTITY_ID, "on")
state = hass.states.get(TEST_NUMBER.entity_id)
assert float(state.state) == 4
hass.states.async_set(TEST_AVAILABILITY_ENTITY_ID, "off")
await hass.async_block_till_done()
await async_trigger(hass, TEST_AVAILABILITY_ENTITY_ID, "off")
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert state.state == STATE_UNAVAILABLE
hass.states.async_set(TEST_STATE_ENTITY_ID, "2.0")
await hass.async_block_till_done()
await async_trigger(hass, TEST_STATE_ENTITY_ID, "2.0")
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert state.state == STATE_UNAVAILABLE
hass.states.async_set(TEST_AVAILABILITY_ENTITY_ID, "on")
await hass.async_block_till_done()
await async_trigger(hass, TEST_AVAILABILITY_ENTITY_ID, "on")
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert float(state.state) == 2
@pytest.mark.parametrize(
("count", "number_config"),
("count", "config"),
[
(
1,
@@ -702,16 +487,17 @@ async def test_availability(hass: HomeAssistant) -> None:
ConfigurationStyle.MODERN,
],
)
async def test_empty_action_config(hass: HomeAssistant, setup_number) -> None:
@pytest.mark.usefixtures("setup_number")
async def test_empty_action_config(hass: HomeAssistant) -> None:
"""Test configuration with empty script."""
await hass.services.async_call(
number.DOMAIN,
number.SERVICE_SET_VALUE,
{ATTR_ENTITY_ID: _TEST_NUMBER, "value": 4},
{ATTR_ENTITY_ID: TEST_NUMBER.entity_id, "value": 4},
blocking=True,
)
state = hass.states.get(_TEST_NUMBER)
state = hass.states.get(TEST_NUMBER.entity_id)
assert float(state.state) == 4
@@ -734,3 +520,29 @@ async def test_flow_preview(
)
assert state["state"] == "0.0"
@pytest.mark.parametrize(
"style",
[ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER],
)
async def test_unique_id(
hass: HomeAssistant,
style: ConfigurationStyle,
) -> None:
"""Test unique_id option only creates one vacuum per id."""
await setup_and_test_unique_id(hass, TEST_NUMBER, style, TEST_REQUIRED, "{{ 0 }}")
@pytest.mark.parametrize(
"style", [ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER]
)
async def test_nested_unique_id(
hass: HomeAssistant,
style: ConfigurationStyle,
entity_registry: er.EntityRegistry,
) -> None:
"""Test a template unique_id propagates to vacuum unique_ids."""
await setup_and_test_nested_unique_id(
hass, TEST_NUMBER, style, entity_registry, TEST_REQUIRED, "{{ 0 }}"
)


@@ -6,14 +6,7 @@ import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant import setup
from homeassistant.components import select, template
from homeassistant.components.input_select import (
ATTR_OPTION as INPUT_SELECT_ATTR_OPTION,
ATTR_OPTIONS as INPUT_SELECT_ATTR_OPTIONS,
DOMAIN as INPUT_SELECT_DOMAIN,
SERVICE_SELECT_OPTION as INPUT_SELECT_SERVICE_SELECT_OPTION,
SERVICE_SET_OPTIONS,
)
from homeassistant.components import select
from homeassistant.components.select import (
ATTR_OPTION as SELECT_ATTR_OPTION,
ATTR_OPTIONS as SELECT_ATTR_OPTIONS,
@@ -31,77 +24,46 @@ from homeassistant.const import (
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import Context, HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.setup import async_setup_component
from .conftest import ConfigurationStyle, async_get_flow_preview_state
from .conftest import (
ConfigurationStyle,
TemplatePlatformSetup,
async_get_flow_preview_state,
async_trigger,
make_test_trigger,
setup_and_test_nested_unique_id,
setup_and_test_unique_id,
setup_entity,
)
from tests.common import MockConfigEntry, assert_setup_component, async_capture_events
from tests.common import MockConfigEntry, assert_setup_component
from tests.conftest import WebSocketGenerator
_TEST_OBJECT_ID = "template_select"
_TEST_SELECT = f"select.{_TEST_OBJECT_ID}"
# Represent for select's current_option
_OPTION_INPUT_SELECT = "input_select.option"
TEST_STATE_ENTITY_ID = "select.test_state"
TEST_AVAILABILITY_ENTITY_ID = "binary_sensor.test_availability"
TEST_STATE_TRIGGER = {
"trigger": {
"trigger": "state",
"entity_id": [
_OPTION_INPUT_SELECT,
TEST_STATE_ENTITY_ID,
TEST_AVAILABILITY_ENTITY_ID,
],
},
"variables": {"triggering_entity": "{{ trigger.entity_id }}"},
"action": [
{"event": "action_event", "event_data": {"what": "{{ triggering_entity }}"}}
],
}
TEST_OPTIONS = {
"state": "test",
TEST_SELECT = TemplatePlatformSetup(
select.DOMAIN,
None,
"template_select",
make_test_trigger(TEST_STATE_ENTITY_ID, TEST_AVAILABILITY_ENTITY_ID),
)
TEST_OPTIONS_WITHOUT_STATE = {
"options": "{{ ['test', 'yes', 'no'] }}",
"select_option": [],
}
async def async_setup_modern_format(
hass: HomeAssistant, count: int, select_config: dict[str, Any]
) -> None:
"""Do setup of select integration via new format."""
config = {"template": {"select": select_config}}
with assert_setup_component(count, template.DOMAIN):
assert await async_setup_component(
hass,
template.DOMAIN,
config,
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
async def async_setup_trigger_format(
hass: HomeAssistant, count: int, select_config: dict[str, Any]
) -> None:
"""Do setup of select integration via trigger format."""
config = {"template": {**TEST_STATE_TRIGGER, "select": select_config}}
with assert_setup_component(count, template.DOMAIN):
assert await async_setup_component(
hass,
template.DOMAIN,
config,
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
TEST_OPTIONS = {"state": "test", **TEST_OPTIONS_WITHOUT_STATE}
TEST_OPTION_ACTION = {
"action": "test.automation",
"data": {
"action": "select_option",
"caller": "{{ this.entity_id }}",
"option": "{{ option }}",
},
}
@pytest.fixture
@@ -109,17 +71,10 @@ async def setup_select(
hass: HomeAssistant,
count: int,
style: ConfigurationStyle,
select_config: dict[str, Any],
config: dict[str, Any],
) -> None:
"""Do setup of select integration."""
if style == ConfigurationStyle.MODERN:
await async_setup_modern_format(
hass, count, {"name": _TEST_OBJECT_ID, **select_config}
)
if style == ConfigurationStyle.TRIGGER:
await async_setup_trigger_format(
hass, count, {"name": _TEST_OBJECT_ID, **select_config}
)
await setup_entity(hass, TEST_SELECT, style, count, config)
async def test_setup_config_entry(
@@ -136,6 +91,7 @@ async def test_setup_config_entry(
"template_type": "select",
"state": "{{ 'on' }}",
"options": "{{ ['off', 'on', 'auto'] }}",
"select_option": [],
},
title="My template",
)
@@ -149,27 +105,24 @@ async def test_setup_config_entry(
assert state == snapshot
@pytest.mark.parametrize("count", [1])
@pytest.mark.parametrize(
"config",
[
{
"state": "{{ 'a' }}",
"select_option": {"service": "script.select_option"},
"options": "{{ ['a', 'b'] }}",
},
],
)
@pytest.mark.parametrize(
"style", [ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER]
)
@pytest.mark.usefixtures("setup_select")
async def test_missing_optional_config(hass: HomeAssistant) -> None:
"""Test: missing optional template is ok."""
with assert_setup_component(1, "template"):
assert await setup.async_setup_component(
hass,
"template",
{
"template": {
"select": {
"state": "{{ 'a' }}",
"select_option": {"service": "script.select_option"},
"options": "{{ ['a', 'b'] }}",
}
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
await async_trigger(hass, TEST_STATE_ENTITY_ID, "anything")
_verify(hass, "a", ["a", "b"])
@@ -202,231 +155,85 @@ async def test_multiple_configs(hass: HomeAssistant) -> None:
await hass.async_block_till_done()
_verify(hass, "a", ["a", "b"])
_verify(hass, "a", ["a", "b"], f"{_TEST_SELECT}_2")
_verify(hass, "a", ["a", "b"], f"{TEST_SELECT.entity_id}_2")
@pytest.mark.parametrize("count", [0])
@pytest.mark.parametrize(
"config",
[
{
"state": "{{ 'a' }}",
"select_option": {"service": "script.select_option"},
},
{
"state": "{{ 'a' }}",
"options": "{{ ['a', 'b'] }}",
},
],
)
@pytest.mark.parametrize(
"style", [ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER]
)
@pytest.mark.usefixtures("setup_select")
async def test_missing_required_keys(hass: HomeAssistant) -> None:
"""Test: missing required fields will fail."""
with assert_setup_component(0, "select"):
assert await setup.async_setup_component(
hass,
"select",
{
"template": {
"select": {
"state": "{{ 'a' }}",
"select_option": {"service": "script.select_option"},
}
}
},
)
with assert_setup_component(0, "select"):
assert await setup.async_setup_component(
hass,
"select",
{
"template": {
"select": {
"state": "{{ 'a' }}",
"options": "{{ ['a', 'b'] }}",
}
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.async_all("select") == []
async def test_templates_with_entities(
hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall]
) -> None:
@pytest.mark.parametrize(
("count", "config"),
[
(
1,
{
"options": "{{ state_attr('select.test_state', 'options') or [] }}",
"select_option": [TEST_OPTION_ACTION],
"state": "{{ states('select.test_state') }}",
},
)
],
)
@pytest.mark.parametrize(
"style", [ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER]
)
@pytest.mark.usefixtures("setup_select")
async def test_template_select(hass: HomeAssistant, calls: list[ServiceCall]) -> None:
"""Test templates with values from other entities."""
with assert_setup_component(1, "input_select"):
assert await setup.async_setup_component(
hass,
"input_select",
{
"input_select": {
"option": {
"options": ["a", "b"],
"initial": "a",
"name": "Option",
},
}
},
)
with assert_setup_component(1, "template"):
assert await setup.async_setup_component(
hass,
"template",
{
"template": {
"unique_id": "b",
"select": {
"state": f"{{{{ states('{_OPTION_INPUT_SELECT}') }}}}",
"options": f"{{{{ state_attr('{_OPTION_INPUT_SELECT}', '{INPUT_SELECT_ATTR_OPTIONS}') }}}}",
"select_option": [
{
"service": "input_select.select_option",
"data_template": {
"entity_id": _OPTION_INPUT_SELECT,
"option": "{{ option }}",
},
},
{
"service": "test.automation",
"data_template": {
"action": "select_option",
"caller": "{{ this.entity_id }}",
"option": "{{ option }}",
},
},
],
"optimistic": True,
"unique_id": "a",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
entry = entity_registry.async_get(_TEST_SELECT)
assert entry
assert entry.unique_id == "b-a"
attributes = {"options": ["a", "b"]}
await async_trigger(hass, TEST_STATE_ENTITY_ID, "a", attributes)
_verify(hass, "a", ["a", "b"])
await hass.services.async_call(
INPUT_SELECT_DOMAIN,
INPUT_SELECT_SERVICE_SELECT_OPTION,
{CONF_ENTITY_ID: _OPTION_INPUT_SELECT, INPUT_SELECT_ATTR_OPTION: "b"},
blocking=True,
)
await hass.async_block_till_done()
await async_trigger(hass, TEST_STATE_ENTITY_ID, "b", attributes)
_verify(hass, "b", ["a", "b"])
await hass.services.async_call(
INPUT_SELECT_DOMAIN,
SERVICE_SET_OPTIONS,
{
CONF_ENTITY_ID: _OPTION_INPUT_SELECT,
INPUT_SELECT_ATTR_OPTIONS: ["a", "b", "c"],
},
blocking=True,
)
await hass.async_block_till_done()
attributes = {"options": ["a", "b", "c"]}
await async_trigger(hass, TEST_STATE_ENTITY_ID, "b", attributes)
_verify(hass, "b", ["a", "b", "c"])
await hass.services.async_call(
SELECT_DOMAIN,
SELECT_SERVICE_SELECT_OPTION,
{CONF_ENTITY_ID: _TEST_SELECT, SELECT_ATTR_OPTION: "c"},
{CONF_ENTITY_ID: TEST_SELECT.entity_id, SELECT_ATTR_OPTION: "c"},
blocking=True,
)
_verify(hass, "c", ["a", "b", "c"])
# Check this variable can be used in the select_option script
assert len(calls) == 1
assert calls[-1].data["action"] == "select_option"
assert calls[-1].data["caller"] == _TEST_SELECT
assert calls[-1].data["caller"] == TEST_SELECT.entity_id
assert calls[-1].data["option"] == "c"
async def test_trigger_select(hass: HomeAssistant) -> None:
"""Test trigger based template select."""
events = async_capture_events(hass, "test_number_event")
action_events = async_capture_events(hass, "action_event")
assert await setup.async_setup_component(
hass,
"template",
{
"template": [
{"invalid": "config"},
# Config after invalid should still be set up
{
"unique_id": "listening-test-event",
"trigger": {"platform": "event", "event_type": "test_event"},
"variables": {"beer": "{{ trigger.event.data.beer }}"},
"action": [
{"event": "action_event", "event_data": {"beer": "{{ beer }}"}}
],
"select": [
{
"name": "Hello Name",
"unique_id": "hello_name-id",
"state": "{{ trigger.event.data.beer }}",
"options": "{{ trigger.event.data.beers }}",
"select_option": {
"event": "test_number_event",
"event_data": {
"entity_id": "{{ this.entity_id }}",
"beer": "{{ beer }}",
},
},
"optimistic": True,
},
],
},
],
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
state = hass.states.get("select.hello_name")
assert state is not None
assert state.state == STATE_UNKNOWN
context = Context()
hass.bus.async_fire(
"test_event", {"beer": "duff", "beers": ["duff", "alamo"]}, context=context
)
await hass.async_block_till_done()
state = hass.states.get("select.hello_name")
assert state is not None
assert state.state == "duff"
assert state.attributes["options"] == ["duff", "alamo"]
assert len(action_events) == 1
assert action_events[0].event_type == "action_event"
beer = action_events[0].data.get("beer")
assert beer is not None
assert beer == "duff"
await hass.services.async_call(
SELECT_DOMAIN,
SELECT_SERVICE_SELECT_OPTION,
{CONF_ENTITY_ID: "select.hello_name", SELECT_ATTR_OPTION: "alamo"},
blocking=True,
)
assert len(events) == 1
assert events[0].event_type == "test_number_event"
entity_id = events[0].data.get("entity_id")
assert entity_id is not None
assert entity_id == "select.hello_name"
beer = events[0].data.get("beer")
assert beer is not None
assert beer == "duff"
await async_trigger(hass, TEST_STATE_ENTITY_ID, "c", attributes)
_verify(hass, "c", ["a", "b", "c"])
def _verify(
hass: HomeAssistant,
expected_current_option: str,
expected_options: list[str],
entity_name: str = _TEST_SELECT,
entity_name: str = TEST_SELECT.entity_id,
) -> None:
"""Verify select's state."""
state = hass.states.get(entity_name)
@@ -441,7 +248,7 @@ def _verify(
[(ConfigurationStyle.MODERN, ""), (ConfigurationStyle.TRIGGER, None)],
)
@pytest.mark.parametrize(
("select_config", "attribute", "expected"),
("config", "attribute", "expected"),
[
(
{
@@ -469,13 +276,13 @@ async def test_templated_optional_config(
initial_expected_state: str | None,
) -> None:
"""Test optional config templates."""
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.attributes.get(attribute) == initial_expected_state
state = hass.states.async_set(TEST_STATE_ENTITY_ID, "yes")
await hass.async_block_till_done()
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.attributes[attribute] == expected
@@ -506,6 +313,7 @@ async def test_device_id(
"template_type": "select",
"state": "{{ 'on' }}",
"options": "{{ ['off', 'on', 'auto'] }}",
"select_option": [],
"device_id": device_entry.id,
},
title="My template",
@@ -521,7 +329,7 @@ async def test_device_id(
@pytest.mark.parametrize(
("count", "select_config"),
("count", "config"),
[
(
1,
@@ -540,21 +348,22 @@ async def test_device_id(
ConfigurationStyle.MODERN,
],
)
async def test_empty_action_config(hass: HomeAssistant, setup_select) -> None:
@pytest.mark.usefixtures("setup_select")
async def test_empty_action_config(hass: HomeAssistant) -> None:
"""Test configuration with empty script."""
await hass.services.async_call(
select.DOMAIN,
select.SERVICE_SELECT_OPTION,
{ATTR_ENTITY_ID: _TEST_SELECT, "option": "a"},
{ATTR_ENTITY_ID: TEST_SELECT.entity_id, "option": "a"},
blocking=True,
)
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == "a"
@pytest.mark.parametrize(
("count", "select_config"),
("count", "config"),
[
(
1,
@@ -573,7 +382,7 @@ async def test_empty_action_config(hass: HomeAssistant, setup_select) -> None:
async def test_optimistic(hass: HomeAssistant) -> None:
"""Test configuration with optimistic state."""
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == STATE_UNKNOWN
# Ensure Trigger template entities update.
@@ -583,26 +392,26 @@ async def test_optimistic(hass: HomeAssistant) -> None:
await hass.services.async_call(
select.DOMAIN,
select.SERVICE_SELECT_OPTION,
{ATTR_ENTITY_ID: _TEST_SELECT, "option": "test"},
{ATTR_ENTITY_ID: TEST_SELECT.entity_id, "option": "test"},
blocking=True,
)
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == "test"
await hass.services.async_call(
select.DOMAIN,
select.SERVICE_SELECT_OPTION,
{ATTR_ENTITY_ID: _TEST_SELECT, "option": "yes"},
{ATTR_ENTITY_ID: TEST_SELECT.entity_id, "option": "yes"},
blocking=True,
)
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == "yes"
@pytest.mark.parametrize(
("count", "select_config"),
("count", "config"),
[
(
1,
@@ -629,16 +438,16 @@ async def test_not_optimistic(hass: HomeAssistant) -> None:
await hass.services.async_call(
select.DOMAIN,
select.SERVICE_SELECT_OPTION,
{ATTR_ENTITY_ID: _TEST_SELECT, "option": "test"},
{ATTR_ENTITY_ID: TEST_SELECT.entity_id, "option": "test"},
blocking=True,
)
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == STATE_UNKNOWN
@pytest.mark.parametrize(
("count", "select_config"),
("count", "config"),
[
(
1,
@@ -662,25 +471,25 @@ async def test_availability(hass: HomeAssistant) -> None:
hass.states.async_set(TEST_STATE_ENTITY_ID, "test")
await hass.async_block_till_done()
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == "test"
hass.states.async_set(TEST_AVAILABILITY_ENTITY_ID, "off")
await hass.async_block_till_done()
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == STATE_UNAVAILABLE
hass.states.async_set(TEST_STATE_ENTITY_ID, "yes")
await hass.async_block_till_done()
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == STATE_UNAVAILABLE
hass.states.async_set(TEST_AVAILABILITY_ENTITY_ID, "on")
await hass.async_block_till_done()
state = hass.states.get(_TEST_SELECT)
state = hass.states.get(TEST_SELECT.entity_id)
assert state.state == "yes"
@@ -698,3 +507,36 @@ async def test_flow_preview(
)
assert state["state"] == "test"
@pytest.mark.parametrize(
"style",
[ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER],
)
async def test_unique_id(
hass: HomeAssistant,
style: ConfigurationStyle,
) -> None:
"""Test unique_id option only creates one vacuum per id."""
await setup_and_test_unique_id(
hass, TEST_SELECT, style, TEST_OPTIONS_WITHOUT_STATE, "{{ 'test' }}"
)
@pytest.mark.parametrize(
"style", [ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER]
)
async def test_nested_unique_id(
hass: HomeAssistant,
style: ConfigurationStyle,
entity_registry: er.EntityRegistry,
) -> None:
"""Test a template unique_id propagates to vacuum unique_ids."""
await setup_and_test_nested_unique_id(
hass,
TEST_SELECT,
style,
entity_registry,
TEST_OPTIONS_WITHOUT_STATE,
"{{ 'test' }}",
)


@@ -19,6 +19,14 @@ from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
from tests.typing import RecorderInstanceContextManager
@pytest.fixture
async def mock_recorder_before_hass(
async_test_recorder: RecorderInstanceContextManager,
) -> None:
"""Set up recorder before hass fixture runs."""
def create_tibber_device(
@@ -158,21 +166,15 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry:
@pytest.fixture
def _tibber_patches() -> AsyncGenerator[tuple[MagicMock, MagicMock]]:
def tibber_mock() -> AsyncGenerator[MagicMock]:
"""Patch the Tibber libraries used by the integration."""
unique_user_id = "unique_user_id"
title = "title"
with (
patch(
"tibber.Tibber",
autospec=True,
) as mock_tibber,
patch(
"tibber.data_api.TibberDataAPI",
autospec=True,
) as mock_data_api_client,
):
with patch(
"tibber.Tibber",
autospec=True,
) as mock_tibber:
tibber_mock = mock_tibber.return_value
tibber_mock.update_info = AsyncMock(return_value=True)
tibber_mock.user_id = unique_user_id
@@ -180,24 +182,21 @@ def _tibber_patches() -> AsyncGenerator[tuple[MagicMock, MagicMock]]:
tibber_mock.send_notification = AsyncMock()
tibber_mock.rt_disconnect = AsyncMock()
tibber_mock.get_homes = MagicMock(return_value=[])
tibber_mock.set_access_token = MagicMock()
data_api_client_mock = mock_data_api_client.return_value
data_api_client_mock.get_all_devices = AsyncMock(return_value={})
data_api_client_mock.update_devices = AsyncMock(return_value={})
data_api_mock = MagicMock()
data_api_mock.get_all_devices = AsyncMock(return_value={})
data_api_mock.update_devices = AsyncMock(return_value={})
data_api_mock.get_userinfo = AsyncMock()
tibber_mock.data_api = data_api_mock
yield tibber_mock, data_api_client_mock
yield tibber_mock
@pytest.fixture
def tibber_mock(_tibber_patches: tuple[MagicMock, MagicMock]) -> MagicMock:
"""Return the patched Tibber connection mock."""
return _tibber_patches[0]
@pytest.fixture
def data_api_client_mock(_tibber_patches: tuple[MagicMock, MagicMock]) -> MagicMock:
def data_api_client_mock(tibber_mock: MagicMock) -> MagicMock:
"""Return the patched Tibber Data API client mock."""
return _tibber_patches[1]
return tibber_mock.data_api
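Because the Data API mock is now attached to the Tibber mock, tests can adjust it through either fixture; a short sketch of the pattern used later in the config flow tests (the return value mirrors test_full_flow_success below):
from unittest.mock import AsyncMock

# Inside a test that requests data_api_client_mock (or tibber_mock):
data_api_client_mock.get_userinfo = AsyncMock(return_value={"name": "Mock Name"})
# Equivalent, since the fixture simply exposes tibber_mock.data_api:
tibber_mock.data_api.get_userinfo = AsyncMock(return_value={"name": "Mock Name"})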
@pytest.fixture


@@ -19,7 +19,6 @@ from homeassistant.components.tibber.application_credentials import TOKEN_URL
from homeassistant.components.tibber.config_flow import (
DATA_API_DEFAULT_SCOPES,
ERR_CLIENT,
ERR_TIMEOUT,
ERR_TOKEN,
)
from homeassistant.components.tibber.const import AUTH_IMPLEMENTATION, DOMAIN
@@ -55,226 +54,164 @@ def _mock_tibber(
return tibber_mock
async def test_show_config_form(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test show configuration form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "user"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
@pytest.mark.parametrize(
("exception", "expected_error"),
[
(builtins.TimeoutError(), ERR_TIMEOUT),
(ClientError(), ERR_CLIENT),
(InvalidLoginError(401), ERR_TOKEN),
(RetryableHttpExceptionError(503), ERR_CLIENT),
(FatalHttpExceptionError(404), ERR_CLIENT),
],
)
async def test_graphql_step_exceptions(
async def test_oauth_create_entry_abort_exceptions(
recorder_mock: Recorder,
hass: HomeAssistant,
tibber_mock: MagicMock,
exception: Exception,
expected_error: str,
) -> None:
"""Validate GraphQL errors are surfaced."""
"""Validate fatal errors during OAuth finalization abort the flow."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
_mock_tibber(tibber_mock, update_side_effect=exception)
flow_result = await handler.async_oauth_create_entry(
{CONF_TOKEN: {CONF_ACCESS_TOKEN: "rest-token"}}
)
assert flow_result["type"] is FlowResultType.ABORT
assert flow_result["reason"] == expected_error
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
@pytest.mark.parametrize(
"exception",
[
builtins.TimeoutError(),
ClientError(),
RetryableHttpExceptionError(503),
],
)
async def test_oauth_create_entry_connection_error_retry(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
exception: Exception,
) -> None:
"""Validate transient connection errors show retry form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
_mock_tibber(tibber_mock, update_side_effect=exception)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "invalid"}
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "user"
assert result["errors"][CONF_ACCESS_TOKEN] == expected_error
assert result["step_id"] == "connection_error"
async def test_flow_entry_already_exists(
recorder_mock: Recorder,
hass: HomeAssistant,
config_entry,
tibber_mock: MagicMock,
) -> None:
"""Test user input for config_entry that already exists."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
_mock_tibber(tibber_mock, user_id="tibber")
tibber_mock.update_info.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "valid"}
result["flow_id"], user_input={}
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"
async def test_reauth_flow_steps(
recorder_mock: Recorder,
hass: HomeAssistant,
config_entry: MockConfigEntry,
) -> None:
"""Test the reauth flow goes through reauth_confirm to user step."""
reauth_flow = await config_entry.start_reauth_flow(hass)
assert reauth_flow["type"] is FlowResultType.FORM
assert reauth_flow["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(reauth_flow["flow_id"])
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(
reauth_flow["flow_id"],
user_input={},
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "user"
async def test_oauth_create_entry_missing_configuration(
recorder_mock: Recorder,
hass: HomeAssistant,
) -> None:
"""Abort OAuth finalize if GraphQL step did not run."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
flow_result = await handler.async_oauth_create_entry(
{CONF_TOKEN: {CONF_ACCESS_TOKEN: "rest-token"}}
)
assert flow_result["type"] is FlowResultType.ABORT
assert flow_result["reason"] == "missing_configuration"
async def test_oauth_create_entry_cannot_connect_userinfo(
recorder_mock: Recorder,
hass: HomeAssistant,
data_api_client_mock: MagicMock,
) -> None:
"""Abort OAuth finalize when Data API userinfo cannot be retrieved."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
handler._access_token = "graphql-token"
data_api_client_mock.get_userinfo = AsyncMock(side_effect=ClientError())
flow_result = await handler.async_oauth_create_entry(
{CONF_TOKEN: {CONF_ACCESS_TOKEN: "rest-token"}}
)
assert flow_result["type"] is FlowResultType.ABORT
assert flow_result["reason"] == "cannot_connect"
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Mock Name"
async def test_data_api_requires_credentials(
recorder_mock: Recorder,
hass: HomeAssistant,
tibber_mock: MagicMock,
) -> None:
"""Abort when OAuth credentials are missing."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
_mock_tibber(tibber_mock)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "valid"}
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "missing_credentials"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_data_api_extra_authorize_scope(
recorder_mock: Recorder,
hass: HomeAssistant,
tibber_mock: MagicMock,
) -> None:
"""Ensure the OAuth implementation requests Tibber scopes."""
with patch("homeassistant.components.recorder.async_setup", return_value=True):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
_mock_tibber(tibber_mock)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "valid"}
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
assert handler.extra_authorize_data["scope"] == " ".join(
DATA_API_DEFAULT_SCOPES
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
assert handler.extra_authorize_data["scope"] == " ".join(DATA_API_DEFAULT_SCOPES)
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_full_flow_success(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
data_api_client_mock: MagicMock,
) -> None:
"""Test configuring Tibber via GraphQL + OAuth."""
with patch("homeassistant.components.recorder.async_setup", return_value=True):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
"""Test configuring Tibber via OAuth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
_mock_tibber(tibber_mock)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "graphql-token"}
)
_mock_tibber(tibber_mock)
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
data_api_client_mock.get_userinfo = AsyncMock(
return_value={"name": "Mock Name"}
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.CREATE_ENTRY
data = result["data"]
assert data[CONF_TOKEN]["access_token"] == "mock-access-token"
assert data[CONF_ACCESS_TOKEN] == "graphql-token"
assert data[AUTH_IMPLEMENTATION] == DOMAIN
assert result["title"] == "Mock Name"
assert result["type"] is FlowResultType.CREATE_ENTRY
data = result["data"]
assert data[CONF_TOKEN]["access_token"] == "mock-access-token"
assert data[AUTH_IMPLEMENTATION] == DOMAIN
assert result["title"] == "Mock Name"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_data_api_abort_when_already_configured(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
) -> None:
"""Ensure only a single Data API entry can be configured."""
@@ -283,7 +220,6 @@ async def test_data_api_abort_when_already_configured(
data={
AUTH_IMPLEMENTATION: DOMAIN,
CONF_TOKEN: {"access_token": "existing"},
CONF_ACCESS_TOKEN: "stored-graphql",
},
unique_id="unique_user_id",
title="Existing",
@@ -295,9 +231,133 @@ async def test_data_api_abort_when_already_configured(
)
_mock_tibber(tibber_mock)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "new-token"}
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_reauth_flow_success(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
) -> None:
"""Test successful reauthentication flow."""
existing_entry = MockConfigEntry(
domain=DOMAIN,
data={
AUTH_IMPLEMENTATION: DOMAIN,
CONF_TOKEN: {"access_token": "old-token"},
},
unique_id="unique_user_id",
title="Existing",
)
existing_entry.add_to_hass(hass)
result = await existing_entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
_mock_tibber(tibber_mock)
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "new-access-token",
"refresh_token": "new-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert existing_entry.data[CONF_TOKEN]["access_token"] == "new-access-token"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_reauth_flow_wrong_account(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
) -> None:
"""Test reauthentication with wrong account aborts."""
existing_entry = MockConfigEntry(
domain=DOMAIN,
data={
AUTH_IMPLEMENTATION: DOMAIN,
CONF_TOKEN: {"access_token": "old-token"},
},
unique_id="original_user_id",
title="Existing",
)
existing_entry.add_to_hass(hass)
result = await existing_entry.start_reauth_flow(hass)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
# Mock a different user_id than the existing entry
_mock_tibber(tibber_mock, user_id="different_user_id")
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "new-access-token",
"refresh_token": "new-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "wrong_account"
