Mirror of https://github.com/home-assistant/core.git (synced 2026-01-21 23:17:00 +01:00)

Compare commits: drop-ignor ... ssl_contex (64 commits)
Commit SHA1s, in the order listed (author and date columns were empty in this view):
b6bb157141, d589b9eb8d, 256d47775b, c9eae821e8, 7c4cdd57b6, 8c02268638,
8436676e67, df10ffd508, 02218fab7b, 6af5698645, 75db2cde40, 329dd05434,
53c53d03e0, 360b394d03, a663d55632, 3fd266a513, 442c1d6242, 0e2aae02f6,
3227a6e49f, 9d0cfb628b, 4578fe0260, 0d92708108, cceb50071b, 62f296c9dd,
ea1f280494, 67108a2fc8, 1ccbd5124e, 818af90a7b, 23bc78fa25, 0b1cc7638f,
c291a2fbc1, 7379a4ff4b, ddcf5cb749, 4b10a542b0, beea9fa74b, ce8fd16456,
2172d15489, 0cfa0ed670, f6839913d8, a7cfac2618, 8fa01497ee, e077c65a77,
7c49656fa8, 1730479c8d, bc28c8fd3c, c3616fd5df, 6b97f2ac06, deefcbcbe4,
e84aeb9f99, ade3d8a657, a65d9032ff, b950a4eaf4, 3fe91751f5, 6ee58b96ca,
d1404e7905, 7c34191813, 7540d04779, d828130670, 2ec6c08bd7, 48852bab7a,
7d370f4513, 9d97791faf, 4fe8982b68, 8248ade211
.github/workflows/ci.yaml (vendored): 4 changed lines
@@ -555,14 +555,14 @@ jobs:
         run: |
           . venv/bin/activate
           python --version
-          pylint homeassistant
+          pylint --ignore-missing-annotations=y homeassistant
       - name: Run pylint (partially)
         if: needs.info.outputs.test_full_suite == 'false'
         shell: bash
         run: |
           . venv/bin/activate
           python --version
-          pylint homeassistant/components/${{ needs.info.outputs.integrations_glob }}
+          pylint --ignore-missing-annotations=y homeassistant/components/${{ needs.info.outputs.integrations_glob }}

   pylint-tests:
     name: Check pylint on tests
@@ -52,7 +52,7 @@ class AdGuardHomeEntity(Entity):
     def device_info(self) -> DeviceInfo:
         """Return device information about this AdGuard Home instance."""
         if self._entry.source == SOURCE_HASSIO:
-            config_url = "homeassistant://hassio/ingress/a0d7b954_adguard"
+            config_url = "homeassistant://app/a0d7b954_adguard"
         elif self.adguard.tls:
             config_url = f"https://{self.adguard.host}:{self.adguard.port}"
         else:
@@ -127,6 +127,7 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
     "assist_satellite",
     "fan",
     "light",
+    "siren",
 }

 _EXPERIMENTAL_TRIGGER_PLATFORMS = {
@@ -601,6 +602,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return a set of referenced labels."""
         referenced = self.action_script.referenced_labels

+        if self._cond_func is not None:
+            for conf in self._cond_func.config:
+                referenced |= condition.async_extract_labels(conf)
+
         for conf in self._trigger_config:
             referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
         return referenced
@@ -610,6 +615,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return a set of referenced floors."""
         referenced = self.action_script.referenced_floors

+        if self._cond_func is not None:
+            for conf in self._cond_func.config:
+                referenced |= condition.async_extract_floors(conf)
+
         for conf in self._trigger_config:
             referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
         return referenced
@@ -619,6 +628,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return a set of referenced areas."""
         referenced = self.action_script.referenced_areas

+        if self._cond_func is not None:
+            for conf in self._cond_func.config:
+                referenced |= condition.async_extract_areas(conf)
+
         for conf in self._trigger_config:
             referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
         return referenced
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.9.0"],
|
||||
"requirements": ["hass-nabucasa==1.11.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ LOGGER = logging.getLogger(__package__)
|
||||
|
||||
DOMAIN = "deconz"
|
||||
|
||||
HASSIO_CONFIGURATION_URL = "homeassistant://hassio/ingress/core_deconz"
|
||||
HASSIO_CONFIGURATION_URL = "homeassistant://app/core_deconz"
|
||||
|
||||
CONF_BRIDGE_ID = "bridgeid"
|
||||
CONF_GROUP_ID_BASE = "group_id_base"
|
||||
|
||||
@@ -1034,7 +1034,7 @@ def _async_setup_device_registry(
|
||||
and dashboard.data
|
||||
and dashboard.data.get(device_info.name)
|
||||
):
|
||||
configuration_url = f"homeassistant://hassio/ingress/{dashboard.addon_slug}"
|
||||
configuration_url = f"homeassistant://app/{dashboard.addon_slug}"
|
||||
|
||||
manufacturer = "espressif"
|
||||
if device_info.manufacturer:
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyfirefly==0.1.11"]
|
||||
"requirements": ["pyfirefly==0.1.12"]
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
|
||||
|
||||
from .const import DOMAIN, UPDATE_INTERVAL
|
||||
from .entity import AqualinkEntity
|
||||
@@ -66,7 +67,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AqualinkConfigEntry) ->
|
||||
username = entry.data[CONF_USERNAME]
|
||||
password = entry.data[CONF_PASSWORD]
|
||||
|
||||
aqualink = AqualinkClient(username, password, httpx_client=get_async_client(hass))
|
||||
aqualink = AqualinkClient(
|
||||
username,
|
||||
password,
|
||||
httpx_client=get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2),
|
||||
)
|
||||
try:
|
||||
await aqualink.login()
|
||||
except AqualinkServiceException as login_exception:
|
||||
|
||||
@@ -15,6 +15,7 @@ import voluptuous as vol
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -36,7 +37,11 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
async with AqualinkClient(
|
||||
username, password, httpx_client=get_async_client(self.hass)
|
||||
username,
|
||||
password,
|
||||
httpx_client=get_async_client(
|
||||
self.hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
|
||||
),
|
||||
):
|
||||
pass
|
||||
except AqualinkServiceUnauthorizedException:
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["kostal"],
|
||||
"requirements": ["pykoplenti==1.3.0"]
|
||||
"requirements": ["pykoplenti==1.5.0"]
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@
|
||||
"title": "Lawn mower",
|
||||
"triggers": {
|
||||
"docked": {
|
||||
"description": "Triggers after one or more lawn mowers return to dock.",
|
||||
"description": "Triggers after one or more lawn mowers have returned to dock.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::lawn_mower::common::trigger_behavior_description%]",
|
||||
|
||||
@@ -1,24 +1,47 @@
 """Provides triggers for lights."""

 from typing import Any

 from homeassistant.const import STATE_OFF, STATE_ON
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.trigger import (
+    EntityNumericalStateAttributeChangedTriggerBase,
+    EntityNumericalStateAttributeCrossedThresholdTriggerBase,
     Trigger,
-    make_entity_numerical_state_attribute_changed_trigger,
-    make_entity_numerical_state_attribute_crossed_threshold_trigger,
     make_entity_target_state_trigger,
 )

 from . import ATTR_BRIGHTNESS
 from .const import DOMAIN


+def _convert_uint8_to_percentage(value: Any) -> float:
+    """Convert a uint8 value (0-255) to a percentage (0-100)."""
+    return (float(value) / 255.0) * 100.0
+
+
+class BrightnessChangedTrigger(EntityNumericalStateAttributeChangedTriggerBase):
+    """Trigger for brightness changed."""
+
+    _domain = DOMAIN
+    _attribute = ATTR_BRIGHTNESS
+    _converter = staticmethod(_convert_uint8_to_percentage)
+
+
+class BrightnessCrossedThresholdTrigger(
+    EntityNumericalStateAttributeCrossedThresholdTriggerBase
+):
+    """Trigger for brightness crossed threshold."""
+
+    _domain = DOMAIN
+    _attribute = ATTR_BRIGHTNESS
+    _converter = staticmethod(_convert_uint8_to_percentage)
+
+
 TRIGGERS: dict[str, type[Trigger]] = {
-    "brightness_changed": make_entity_numerical_state_attribute_changed_trigger(
-        DOMAIN, ATTR_BRIGHTNESS
-    ),
-    "brightness_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
-        DOMAIN, ATTR_BRIGHTNESS
-    ),
+    "brightness_changed": BrightnessChangedTrigger,
+    "brightness_crossed_threshold": BrightnessCrossedThresholdTrigger,
     "turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
     "turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
 }
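The new trigger classes above report brightness on a 0-100 scale instead of the raw 0-255 attribute value, by routing the attribute through the staticmethod converter. A quick sketch (not part of the diff) of what that conversion means for threshold values:

from typing import Any


def _convert_uint8_to_percentage(value: Any) -> float:
    """Convert a uint8 value (0-255) to a percentage (0-100), as in light/trigger.py."""
    return (float(value) / 255.0) * 100.0


# A brightness_crossed_threshold trigger configured with a 50 % limit fires when
# the raw brightness attribute crosses roughly 128.
assert round(_convert_uint8_to_percentage(128)) == 50
assert _convert_uint8_to_percentage(255) == 100.0
assert _convert_uint8_to_percentage(0) == 0.0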
@@ -22,7 +22,10 @@
|
||||
number:
|
||||
selector:
|
||||
number:
|
||||
max: 100
|
||||
min: 0
|
||||
mode: box
|
||||
unit_of_measurement: "%"
|
||||
entity:
|
||||
selector:
|
||||
entity:
|
||||
|
||||
@@ -50,7 +50,7 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Check connection to the Mealie API."""
|
||||
assert self.host is not None
|
||||
|
||||
if "/hassio/ingress/" in self.host:
|
||||
if "/app/" in self.host:
|
||||
return {"base": "ingress_url"}, None
|
||||
|
||||
client = MealieClient(
|
||||
|
||||
@@ -73,15 +73,6 @@ SHARED_OPTIONS = [
|
||||
CONF_STATE_TOPIC,
|
||||
]
|
||||
|
||||
MQTT_ORIGIN_INFO_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_SW_VERSION): cv.string,
|
||||
vol.Optional(CONF_SUPPORT_URL): cv.configuration_url,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
_MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema(
|
||||
{
|
||||
|
||||
@@ -125,7 +125,7 @@ class NumberDeviceClass(StrEnum):
|
||||
CO = "carbon_monoxide"
|
||||
"""Carbon Monoxide gas concentration.
|
||||
|
||||
Unit of measurement: `ppm` (parts per million), `mg/m³`, `μg/m³`
|
||||
Unit of measurement: `ppb` (parts per billion), `ppm` (parts per million), `mg/m³`, `μg/m³`
|
||||
"""
|
||||
|
||||
CO2 = "carbon_dioxide"
|
||||
@@ -373,7 +373,7 @@ class NumberDeviceClass(StrEnum):
|
||||
SULPHUR_DIOXIDE = "sulphur_dioxide"
|
||||
"""Amount of SO2.
|
||||
|
||||
Unit of measurement: `μg/m³`
|
||||
Unit of measurement: `ppb` (parts per billion), `μg/m³`
|
||||
"""
|
||||
|
||||
TEMPERATURE = "temperature"
|
||||
@@ -483,6 +483,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
|
||||
NumberDeviceClass.BATTERY: {PERCENTAGE},
|
||||
NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
|
||||
NumberDeviceClass.CO: {
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
@@ -545,7 +546,10 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
|
||||
},
|
||||
NumberDeviceClass.SOUND_PRESSURE: set(UnitOfSoundPressure),
|
||||
NumberDeviceClass.SPEED: {*UnitOfSpeed, *UnitOfVolumetricFlux},
|
||||
NumberDeviceClass.SULPHUR_DIOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
NumberDeviceClass.SULPHUR_DIOXIDE: {
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
},
|
||||
NumberDeviceClass.TEMPERATURE: set(UnitOfTemperature),
|
||||
NumberDeviceClass.TEMPERATURE_DELTA: set(UnitOfTemperature),
|
||||
NumberDeviceClass.VOLATILE_ORGANIC_COMPOUNDS: {
|
||||
|
||||
@@ -8,6 +8,9 @@
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
|
||||
@@ -158,7 +158,7 @@ MODEL_NAMES = [ # https://ollama.com/library
|
||||
"yi",
|
||||
"zephyr",
|
||||
]
|
||||
DEFAULT_MODEL = "qwen3:4b"
|
||||
DEFAULT_MODEL = "qwen3:4b-instruct"
|
||||
|
||||
DEFAULT_CONVERSATION_NAME = "Ollama Conversation"
|
||||
DEFAULT_AI_TASK_NAME = "Ollama AI Task"
|
||||
|
||||
@@ -178,6 +178,7 @@ class OneDriveBackupAgent(BackupAgent):
|
||||
file,
|
||||
upload_chunk_size=upload_chunk_size,
|
||||
session=async_get_clientsession(self._hass),
|
||||
smart_chunk_size=True,
|
||||
)
|
||||
except HashMismatchError as err:
|
||||
raise BackupAgentError(
|
||||
|
||||
@@ -10,5 +10,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["onedrive_personal_sdk"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["onedrive-personal-sdk==0.1.0"]
|
||||
"requirements": ["onedrive-personal-sdk==0.1.1"]
|
||||
}
|
||||
|
||||
@@ -13,6 +13,9 @@
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/qnap_qsw",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioqsw"],
|
||||
"requirements": ["aioqsw==0.4.2"]
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
"codeowners": ["@rabbit-air"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rabbitair",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["python-rabbitair==0.0.8"],
|
||||
"zeroconf": ["_rabbitair._udp.local."]
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/radiotherm",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["radiotherm"],
|
||||
"requirements": ["radiotherm==2.1.0"]
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["usb"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/rainforest_raven",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["aioraven==0.7.1"],
|
||||
"usb": [
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/rapt_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["rapt-ble==0.1.2"]
|
||||
}
|
||||
|
||||
@@ -64,6 +64,7 @@ from homeassistant.util.unit_conversion import (
|
||||
ReactiveEnergyConverter,
|
||||
ReactivePowerConverter,
|
||||
SpeedConverter,
|
||||
SulphurDioxideConcentrationConverter,
|
||||
TemperatureConverter,
|
||||
TemperatureDeltaConverter,
|
||||
UnitlessRatioConverter,
|
||||
@@ -225,6 +226,7 @@ _PRIMARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
|
||||
_SECONDARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
|
||||
CarbonMonoxideConcentrationConverter,
|
||||
TemperatureDeltaConverter,
|
||||
SulphurDioxideConcentrationConverter,
|
||||
]
|
||||
|
||||
STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {
|
||||
|
||||
@@ -38,6 +38,7 @@ from homeassistant.util.unit_conversion import (
|
||||
ReactiveEnergyConverter,
|
||||
ReactivePowerConverter,
|
||||
SpeedConverter,
|
||||
SulphurDioxideConcentrationConverter,
|
||||
TemperatureConverter,
|
||||
TemperatureDeltaConverter,
|
||||
UnitlessRatioConverter,
|
||||
@@ -94,6 +95,9 @@ UNIT_SCHEMA = vol.Schema(
|
||||
vol.Optional("reactive_energy"): vol.In(ReactiveEnergyConverter.VALID_UNITS),
|
||||
vol.Optional("reactive_power"): vol.In(ReactivePowerConverter.VALID_UNITS),
|
||||
vol.Optional("speed"): vol.In(SpeedConverter.VALID_UNITS),
|
||||
vol.Optional("sulphur_dioxide"): vol.In(
|
||||
SulphurDioxideConcentrationConverter.VALID_UNITS
|
||||
),
|
||||
vol.Optional("temperature"): vol.In(TemperatureConverter.VALID_UNITS),
|
||||
vol.Optional("temperature_delta"): vol.In(
|
||||
TemperatureDeltaConverter.VALID_UNITS
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@ashionky"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/refoss",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["refoss-ha==1.2.5"],
|
||||
"single_config_entry": true
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/rehlko",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aiokem"],
|
||||
"quality_scale": "silver",
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@jimmyd-be"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/renson",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["renson-endura-delta==1.7.2"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@danielhiversen", "@elupus", "@RobBie1221"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rfxtrx",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["RFXtrx"],
|
||||
"requirements": ["pyRFXtrx==0.31.1"]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@milanmeu", "@frenck", "@quebulm"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rituals_perfume_genie",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyrituals"],
|
||||
"requirements": ["pyrituals==0.0.7"]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@xeniter"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/romy",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["romy==0.0.10"],
|
||||
"zeroconf": ["_aicu-http._tcp.local."]
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/roomba",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["paho_mqtt", "roombapy"],
|
||||
"requirements": ["roombapy==1.9.0"],
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@pavoni"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/roon",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["roonapi"],
|
||||
"requirements": ["roonapi==0.1.6"]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rova",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["rova"],
|
||||
"requirements": ["rova==0.4.1"]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@noahhusby"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/russound_rio",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiorussound"],
|
||||
"quality_scale": "silver",
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/ruuvi_gateway",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["aioruuvigateway==0.1.0"]
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/ruuvitag_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["ruuvitag-ble==0.4.0"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@OnFreund", "@elad-bar", "@maorcc"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rympro",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["pyrympro==0.0.9"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@shaiu", "@jpbede"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/sabnzbd",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pysabnzbd"],
|
||||
"quality_scale": "bronze",
|
||||
|
||||
@@ -44,6 +44,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: LeilSaunaConfigEntry) ->
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: LeilSaunaConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
entry.runtime_data.client.close()
|
||||
await entry.runtime_data.client.async_close()
|
||||
|
||||
return unload_ok
|
||||
|
||||
@@ -51,7 +51,7 @@ async def validate_input(data: dict[str, Any]) -> None:
|
||||
# Try to read data to verify communication
|
||||
await client.async_get_data()
|
||||
finally:
|
||||
client.close()
|
||||
await client.async_close()
|
||||
|
||||
|
||||
class LeilSaunaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pysaunum"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pysaunum==0.2.0"]
|
||||
"requirements": ["pysaunum==0.3.0"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@dknowles2"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/schlage",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["pyschlage==2025.9.0"]
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/sense",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["sense_energy"],
|
||||
"requirements": ["sense-energy==0.13.8"]
|
||||
|
||||
@@ -68,6 +68,7 @@ from homeassistant.util.unit_conversion import (
|
||||
ReactiveEnergyConverter,
|
||||
ReactivePowerConverter,
|
||||
SpeedConverter,
|
||||
SulphurDioxideConcentrationConverter,
|
||||
TemperatureConverter,
|
||||
TemperatureDeltaConverter,
|
||||
UnitlessRatioConverter,
|
||||
@@ -158,7 +159,7 @@ class SensorDeviceClass(StrEnum):
|
||||
CO = "carbon_monoxide"
|
||||
"""Carbon Monoxide gas concentration.
|
||||
|
||||
Unit of measurement: `ppm` (parts per million), `mg/m³`, `μg/m³`
|
||||
Unit of measurement: `ppb` (parts per billion), `ppm` (parts per million), `mg/m³`, `μg/m³`
|
||||
"""
|
||||
|
||||
CO2 = "carbon_dioxide"
|
||||
@@ -409,7 +410,7 @@ class SensorDeviceClass(StrEnum):
|
||||
SULPHUR_DIOXIDE = "sulphur_dioxide"
|
||||
"""Amount of SO2.
|
||||
|
||||
Unit of measurement: `μg/m³`
|
||||
Unit of measurement: `ppb` (parts per billion), `μg/m³`
|
||||
"""
|
||||
|
||||
TEMPERATURE = "temperature"
|
||||
@@ -569,6 +570,7 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] =
|
||||
SensorDeviceClass.PRESSURE: PressureConverter,
|
||||
SensorDeviceClass.REACTIVE_ENERGY: ReactiveEnergyConverter,
|
||||
SensorDeviceClass.REACTIVE_POWER: ReactivePowerConverter,
|
||||
SensorDeviceClass.SULPHUR_DIOXIDE: SulphurDioxideConcentrationConverter,
|
||||
SensorDeviceClass.SPEED: SpeedConverter,
|
||||
SensorDeviceClass.TEMPERATURE: TemperatureConverter,
|
||||
SensorDeviceClass.TEMPERATURE_DELTA: TemperatureDeltaConverter,
|
||||
@@ -595,6 +597,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
|
||||
SensorDeviceClass.BATTERY: {PERCENTAGE},
|
||||
SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
|
||||
SensorDeviceClass.CO: {
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
@@ -657,7 +660,10 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
|
||||
},
|
||||
SensorDeviceClass.SOUND_PRESSURE: set(UnitOfSoundPressure),
|
||||
SensorDeviceClass.SPEED: {*UnitOfSpeed, *UnitOfVolumetricFlux},
|
||||
SensorDeviceClass.SULPHUR_DIOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
SensorDeviceClass.SULPHUR_DIOXIDE: {
|
||||
CONCENTRATION_PARTS_PER_BILLION,
|
||||
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
},
|
||||
SensorDeviceClass.TEMPERATURE: set(UnitOfTemperature),
|
||||
SensorDeviceClass.TEMPERATURE_DELTA: set(UnitOfTemperature),
|
||||
SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS: {
|
||||
|
||||
homeassistant/components/siren/condition.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+"""Provides conditions for sirens."""
+
+from homeassistant.const import STATE_OFF, STATE_ON
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.condition import Condition, make_entity_state_condition
+
+from . import DOMAIN
+
+CONDITIONS: dict[str, type[Condition]] = {
+    "is_off": make_entity_state_condition(DOMAIN, STATE_OFF),
+    "is_on": make_entity_state_condition(DOMAIN, STATE_ON),
+}
+
+
+async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
+    """Return the siren conditions."""
+    return CONDITIONS
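Registering the mapping returned by async_get_conditions makes siren.is_on and siren.is_off available as target-based conditions, with the behavior field defined in conditions.yaml below. A hedged sketch of the kind of condition config these accept; the entity IDs and the exact dict shape are assumptions for illustration:

EXAMPLE_SIREN_CONDITION = {
    "condition": "siren.is_on",
    "target": {"entity_id": ["siren.porch", "siren.garage"]},
    # "all" requires every targeted siren to be on; "any" requires at least one,
    # per the selector options in conditions.yaml.
    "behavior": "all",
}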
homeassistant/components/siren/conditions.yaml (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
.condition_common: &condition_common
|
||||
target:
|
||||
entity:
|
||||
domain: siren
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
|
||||
is_off: *condition_common
|
||||
is_on: *condition_common
|
||||
@@ -1,4 +1,12 @@
|
||||
{
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"condition": "mdi:bullhorn-outline"
|
||||
},
|
||||
"is_on": {
|
||||
"condition": "mdi:bullhorn"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"default": "mdi:bullhorn"
|
||||
|
||||
@@ -1,8 +1,32 @@
|
||||
{
|
||||
"common": {
|
||||
"condition_behavior_description": "How the state should match on the targeted sirens.",
|
||||
"condition_behavior_name": "Behavior",
|
||||
"trigger_behavior_description": "The behavior of the targeted sirens to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"description": "Tests if one or more sirens are off.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::siren::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::siren::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "If a siren is off"
|
||||
},
|
||||
"is_on": {
|
||||
"description": "Tests if one or more sirens are on.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::siren::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::siren::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "If a siren is on"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"name": "[%key:component::siren::title%]",
|
||||
@@ -18,6 +42,12 @@
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
|
||||
@@ -18,6 +18,9 @@
|
||||
"error": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"data": {
|
||||
|
||||
@@ -16,6 +16,9 @@
|
||||
"create_entry": {
|
||||
"default": "Successfully authenticated with Spotify."
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"data": {
|
||||
|
||||
@@ -185,6 +185,9 @@ async def make_device_data(
|
||||
"Smart Lock Lite",
|
||||
"Smart Lock Pro",
|
||||
"Smart Lock Ultra",
|
||||
"Smart Lock Vision",
|
||||
"Smart Lock Vision Pro",
|
||||
"Smart Lock Pro Wifi",
|
||||
]:
|
||||
coordinator = await coordinator_for_device(
|
||||
hass, entry, api, device, coordinators_by_id
|
||||
|
||||
@@ -92,6 +92,18 @@ BINARY_SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
|
||||
CALIBRATION_DESCRIPTION,
|
||||
DOOR_OPEN_DESCRIPTION,
|
||||
),
|
||||
"Smart Lock Vision": (
|
||||
CALIBRATION_DESCRIPTION,
|
||||
DOOR_OPEN_DESCRIPTION,
|
||||
),
|
||||
"Smart Lock Vision Pro": (
|
||||
CALIBRATION_DESCRIPTION,
|
||||
DOOR_OPEN_DESCRIPTION,
|
||||
),
|
||||
"Smart Lock Pro Wifi": (
|
||||
CALIBRATION_DESCRIPTION,
|
||||
DOOR_OPEN_DESCRIPTION,
|
||||
),
|
||||
"Curtain": (CALIBRATION_DESCRIPTION,),
|
||||
"Curtain3": (CALIBRATION_DESCRIPTION,),
|
||||
"Roller Shade": (CALIBRATION_DESCRIPTION,),
|
||||
|
||||
@@ -46,7 +46,7 @@ class SwitchBotCloudLock(SwitchBotCloudEntity, LockEntity):
|
||||
"""Set attributes from coordinator data."""
|
||||
if coord_data := self.coordinator.data:
|
||||
self._attr_is_locked = coord_data["lockState"] == "locked"
|
||||
if self.__model in LockV2Commands.get_supported_devices():
|
||||
if self.__model != "Smart Lock Lite":
|
||||
self._attr_supported_features = LockEntityFeature.OPEN
|
||||
|
||||
async def async_lock(self, **kwargs: Any) -> None:
|
||||
|
||||
@@ -225,6 +225,9 @@ SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
|
||||
"Smart Lock Lite": (BATTERY_DESCRIPTION,),
|
||||
"Smart Lock Pro": (BATTERY_DESCRIPTION,),
|
||||
"Smart Lock Ultra": (BATTERY_DESCRIPTION,),
|
||||
"Smart Lock Vision": (BATTERY_DESCRIPTION,),
|
||||
"Smart Lock Vision Pro": (BATTERY_DESCRIPTION,),
|
||||
"Smart Lock Pro Wifi": (BATTERY_DESCRIPTION,),
|
||||
"Relay Switch 2PM": (
|
||||
RELAY_SWITCH_2PM_POWER_DESCRIPTION,
|
||||
RELAY_SWITCH_2PM_VOLTAGE_DESCRIPTION,
|
||||
|
||||
@@ -165,7 +165,7 @@ class DeviceListener(SharingDeviceListener):
|
||||
self,
|
||||
device: CustomerDevice,
|
||||
updated_status_properties: list[str] | None = None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None = None,
|
||||
) -> None:
|
||||
"""Update device status with optional DP timestamps."""
|
||||
LOGGER.debug(
|
||||
|
||||
@@ -471,9 +471,11 @@ class TuyaBinarySensorEntity(TuyaEntity, BinarySensorEntity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
"""Handle state update, only if this entity's dpcode was actually updated."""
|
||||
if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
|
||||
if self._dpcode_wrapper.skip_update(
|
||||
self.device, updated_status_properties, dp_timestamps
|
||||
):
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -57,7 +57,7 @@ class TuyaEntity(Entity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -218,10 +218,10 @@ class TuyaEventEntity(TuyaEntity, EventEntity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
if self._dpcode_wrapper.skip_update(
|
||||
self.device, updated_status_properties
|
||||
self.device, updated_status_properties, dp_timestamps
|
||||
) or not (event_data := self._dpcode_wrapper.read_device_status(self.device)):
|
||||
return
|
||||
|
||||
|
||||
@@ -31,7 +31,10 @@ class DeviceWrapper[T]:
     options: list[str]

     def skip_update(
-        self, device: CustomerDevice, updated_status_properties: list[str] | None
+        self,
+        device: CustomerDevice,
+        updated_status_properties: list[str] | None,
+        dp_timestamps: dict[str, int] | None,
     ) -> bool:
         """Determine if the wrapper should skip an update.

@@ -62,7 +65,10 @@ class DPCodeWrapper(DeviceWrapper):
         self.dpcode = dpcode

     def skip_update(
-        self, device: CustomerDevice, updated_status_properties: list[str] | None
+        self,
+        device: CustomerDevice,
+        updated_status_properties: list[str] | None,
+        dp_timestamps: dict[str, int] | None,
     ) -> bool:
         """Determine if the wrapper should skip an update.
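Every Tuya _handle_state_update in this diff now forwards a dp_timestamps mapping (DP code to update timestamp) into skip_update, but the body of skip_update itself is not shown here. Below is a hedged sketch of one way such a timestamp check could work; this is an assumption, not the repository's implementation:

# Hedged sketch only: the diff changes skip_update()'s signature to accept
# dp_timestamps, but does not show the body. One plausible use (assumption)
# is to skip updates whose DP timestamp is not newer than the last one seen
# for this dpcode.
class DPCodeWrapperSketch:
    def __init__(self, dpcode: str) -> None:
        self.dpcode = dpcode
        self._last_dp_timestamp: int | None = None

    def skip_update(
        self,
        updated_status_properties: list[str] | None,
        dp_timestamps: dict[str, int] | None,
    ) -> bool:
        # Skip when this entity's dpcode was not among the updated properties.
        if updated_status_properties is not None and (
            self.dpcode not in updated_status_properties
        ):
            return True
        # Skip when the reported DP timestamp is not newer than the last one.
        if dp_timestamps and (ts := dp_timestamps.get(self.dpcode)) is not None:
            if self._last_dp_timestamp is not None and ts <= self._last_dp_timestamp:
                return True
            self._last_dp_timestamp = ts
        return False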
@@ -554,10 +554,12 @@ class TuyaNumberEntity(TuyaEntity, NumberEntity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
"""Handle state update, only if this entity's dpcode was actually updated."""
|
||||
if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
|
||||
if self._dpcode_wrapper.skip_update(
|
||||
self.device, updated_status_properties, dp_timestamps
|
||||
):
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -410,10 +410,12 @@ class TuyaSelectEntity(TuyaEntity, SelectEntity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
"""Handle state update, only if this entity's dpcode was actually updated."""
|
||||
if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
|
||||
if self._dpcode_wrapper.skip_update(
|
||||
self.device, updated_status_properties, dp_timestamps
|
||||
):
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -1853,9 +1853,11 @@ class TuyaSensorEntity(TuyaEntity, SensorEntity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
"""Handle state update, only if this entity's dpcode was actually updated."""
|
||||
if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
|
||||
if self._dpcode_wrapper.skip_update(
|
||||
self.device, updated_status_properties, dp_timestamps
|
||||
):
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -110,10 +110,12 @@ class TuyaSirenEntity(TuyaEntity, SirenEntity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
"""Handle state update, only if this entity's dpcode was actually updated."""
|
||||
if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
|
||||
if self._dpcode_wrapper.skip_update(
|
||||
self.device, updated_status_properties, dp_timestamps
|
||||
):
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -1043,10 +1043,12 @@ class TuyaSwitchEntity(TuyaEntity, SwitchEntity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
"""Handle state update, only if this entity's dpcode was actually updated."""
|
||||
if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
|
||||
if self._dpcode_wrapper.skip_update(
|
||||
self.device, updated_status_properties, dp_timestamps
|
||||
):
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -140,10 +140,12 @@ class TuyaValveEntity(TuyaEntity, ValveEntity):
|
||||
async def _handle_state_update(
|
||||
self,
|
||||
updated_status_properties: list[str] | None,
|
||||
dp_timestamps: dict | None = None,
|
||||
dp_timestamps: dict[str, int] | None,
|
||||
) -> None:
|
||||
"""Handle state update, only if this entity's dpcode was actually updated."""
|
||||
if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
|
||||
if self._dpcode_wrapper.skip_update(
|
||||
self.device, updated_status_properties, dp_timestamps
|
||||
):
|
||||
return
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"wrong_account": "Wrong account: Please authenticate with {username}."
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"description": "The Twitch integration needs to re-authenticate your account",
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["waterfurnace"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["waterfurnace==1.2.0"]
|
||||
"requirements": ["waterfurnace==1.4.0"]
|
||||
}
|
||||
|
||||
@@ -21,6 +21,9 @@
|
||||
"error": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"oauth_discovery": {
|
||||
"description": "Home Assistant has found a Withings device on your network. Be aware that the setup of Withings is more complicated than many other integrations. Press **Submit** to continue setting up Withings."
|
||||
|
||||
@@ -47,6 +47,9 @@ async def async_setup_entry(
|
||||
class WyomingTtsProvider(tts.TextToSpeechEntity):
|
||||
"""Wyoming text-to-speech provider."""
|
||||
|
||||
_attr_default_options = {}
|
||||
_attr_supported_options = [tts.ATTR_AUDIO_OUTPUT, tts.ATTR_VOICE, ATTR_SPEAKER]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config_entry: ConfigEntry,
|
||||
@@ -78,38 +81,13 @@ class WyomingTtsProvider(tts.TextToSpeechEntity):
|
||||
self._voices[language], key=lambda v: v.name
|
||||
)
|
||||
|
||||
self._supported_languages: list[str] = list(voice_languages)
|
||||
self._attr_supported_languages = list(voice_languages)
|
||||
if self._attr_supported_languages:
|
||||
self._attr_default_language = self._attr_supported_languages[0]
|
||||
|
||||
self._attr_name = self._tts_service.name
|
||||
self._attr_unique_id = f"{config_entry.entry_id}-tts"
|
||||
|
||||
@property
|
||||
def default_language(self):
|
||||
"""Return default language."""
|
||||
if not self._supported_languages:
|
||||
return None
|
||||
|
||||
return self._supported_languages[0]
|
||||
|
||||
@property
|
||||
def supported_languages(self):
|
||||
"""Return list of supported languages."""
|
||||
return self._supported_languages
|
||||
|
||||
@property
|
||||
def supported_options(self):
|
||||
"""Return list of supported options like voice, emotion."""
|
||||
return [
|
||||
tts.ATTR_AUDIO_OUTPUT,
|
||||
tts.ATTR_VOICE,
|
||||
ATTR_SPEAKER,
|
||||
]
|
||||
|
||||
@property
|
||||
def default_options(self):
|
||||
"""Return a dict include default options."""
|
||||
return {}
|
||||
|
||||
@callback
|
||||
def async_get_supported_voices(self, language: str) -> list[tts.Voice] | None:
|
||||
"""Return a list of supported voices for a language."""
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"oauth_discovery": {
|
||||
"description": "Home Assistant has found an Xbox device on your network. Press **Submit** to continue setting up the Xbox integration.",
|
||||
|
||||
@@ -17,6 +17,9 @@
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"channels": {
|
||||
"data": { "channels": "YouTube channels" },
|
||||
|
||||
@@ -5375,7 +5375,7 @@
|
||||
"name": "QNAP"
|
||||
},
|
||||
"qnap_qsw": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "QNAP QSW"
|
||||
@@ -5413,7 +5413,7 @@
|
||||
},
|
||||
"rabbitair": {
|
||||
"name": "Rabbit Air",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -5438,7 +5438,7 @@
|
||||
},
|
||||
"radiotherm": {
|
||||
"name": "Radio Thermostat",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -5473,7 +5473,7 @@
|
||||
},
|
||||
"rapt_ble": {
|
||||
"name": "RAPT Bluetooth",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -5571,7 +5571,7 @@
|
||||
},
|
||||
"renson": {
|
||||
"name": "Renson",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -5679,13 +5679,13 @@
|
||||
},
|
||||
"romy": {
|
||||
"name": "ROMY Vacuum Cleaner",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
"roomba": {
|
||||
"name": "iRobot Roomba and Braava",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -5720,7 +5720,7 @@
|
||||
},
|
||||
"rova": {
|
||||
"name": "ROVA",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
@@ -5763,13 +5763,13 @@
|
||||
"name": "Ruuvi",
|
||||
"integrations": {
|
||||
"ruuvi_gateway": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Ruuvi Gateway"
|
||||
},
|
||||
"ruuvitag_ble": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"name": "Ruuvi BLE"
|
||||
@@ -5784,7 +5784,7 @@
|
||||
},
|
||||
"sabnzbd": {
|
||||
"name": "SABnzbd",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
|
||||
@@ -370,9 +370,13 @@ def _async_get_connector(
         return connectors[connector_key]

     if verify_ssl:
-        ssl_context: SSLContext = ssl_util.client_context(ssl_cipher)
+        ssl_context: SSLContext = ssl_util.client_context(
+            ssl_cipher, ssl_util.SSL_ALPN_HTTP11
+        )
     else:
-        ssl_context = ssl_util.client_context_no_verify(ssl_cipher)
+        ssl_context = ssl_util.client_context_no_verify(
+            ssl_cipher, ssl_util.SSL_ALPN_HTTP11
+        )

     connector = HomeAssistantTCPConnector(
         family=family,
@@ -17,6 +17,7 @@ from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Final,
|
||||
Literal,
|
||||
Protocol,
|
||||
TypedDict,
|
||||
Unpack,
|
||||
@@ -28,7 +29,10 @@ from typing import (
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_AREA_ID,
|
||||
ATTR_DEVICE_CLASS,
|
||||
ATTR_FLOOR_ID,
|
||||
ATTR_LABEL_ID,
|
||||
CONF_ABOVE,
|
||||
CONF_AFTER,
|
||||
CONF_ATTRIBUTE,
|
||||
@@ -1346,13 +1350,18 @@ def async_extract_entities(config: ConfigType | Template) -> set[str]:
|
||||
if entity_ids is not None:
|
||||
referenced.update(entity_ids)
|
||||
|
||||
if target_entities := _get_targets_from_condition_config(
|
||||
config, CONF_ENTITY_ID
|
||||
):
|
||||
referenced.update(target_entities)
|
||||
|
||||
return referenced
|
||||
|
||||
|
||||
@callback
|
||||
def async_extract_devices(config: ConfigType | Template) -> set[str]:
|
||||
"""Extract devices from a condition."""
|
||||
referenced = set()
|
||||
referenced: set[str] = set()
|
||||
to_process = deque([config])
|
||||
|
||||
while to_process:
|
||||
@@ -1366,15 +1375,75 @@ def async_extract_devices(config: ConfigType | Template) -> set[str]:
|
||||
to_process.extend(config["conditions"])
|
||||
continue
|
||||
|
||||
if condition != "device":
|
||||
if condition == "device":
|
||||
if (device_id := config.get(CONF_DEVICE_ID)) is not None:
|
||||
referenced.add(device_id)
|
||||
continue
|
||||
|
||||
if (device_id := config.get(CONF_DEVICE_ID)) is not None:
|
||||
referenced.add(device_id)
|
||||
if target_devices := _get_targets_from_condition_config(config, CONF_DEVICE_ID):
|
||||
referenced.update(target_devices)
|
||||
|
||||
return referenced
|
||||
|
||||
|
||||
@callback
|
||||
def async_extract_areas(config: ConfigType | Template) -> set[str]:
|
||||
"""Extract areas from a condition."""
|
||||
return _async_extract_targets(config, ATTR_AREA_ID)
|
||||
|
||||
|
||||
@callback
|
||||
def async_extract_floors(config: ConfigType | Template) -> set[str]:
|
||||
"""Extract floors from a condition."""
|
||||
return _async_extract_targets(config, ATTR_FLOOR_ID)
|
||||
|
||||
|
||||
@callback
|
||||
def async_extract_labels(config: ConfigType | Template) -> set[str]:
|
||||
"""Extract labels from a condition."""
|
||||
return _async_extract_targets(config, ATTR_LABEL_ID)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_extract_targets(
|
||||
config: ConfigType | Template,
|
||||
target_type: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
|
||||
) -> set[str]:
|
||||
"""Extract targets from a condition."""
|
||||
referenced: set[str] = set()
|
||||
to_process = deque([config])
|
||||
|
||||
while to_process:
|
||||
config = to_process.popleft()
|
||||
if isinstance(config, Template):
|
||||
continue
|
||||
|
||||
condition = config[CONF_CONDITION]
|
||||
|
||||
if condition in ("and", "not", "or"):
|
||||
to_process.extend(config["conditions"])
|
||||
continue
|
||||
|
||||
if targets := _get_targets_from_condition_config(config, target_type):
|
||||
referenced.update(targets)
|
||||
|
||||
return referenced
|
||||
|
||||
|
||||
@callback
|
||||
def _get_targets_from_condition_config(
|
||||
config: ConfigType,
|
||||
target: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
|
||||
) -> list[str]:
|
||||
"""Extract targets from a condition target config."""
|
||||
if not (target_conf := config.get(CONF_TARGET)):
|
||||
return []
|
||||
if not (targets := target_conf.get(target)):
|
||||
return []
|
||||
|
||||
return [targets] if isinstance(targets, str) else targets
|
||||
|
||||
|
||||
def _load_conditions_file(integration: Integration) -> dict[str, Any]:
|
||||
"""Load conditions file for an integration."""
|
||||
try:
|
||||
|
||||
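The new _async_extract_targets and _get_targets_from_condition_config helpers above walk nested "and"/"or"/"not" blocks and collect IDs from each condition's target, accepting either a single string or a list. A hedged illustration with an invented condition config:

# The condition config below is made up; the expected results follow from
# _async_extract_targets walking nested blocks and reading each "target".
example_condition = {
    "condition": "and",
    "conditions": [
        {
            "condition": "light.is_on",
            "target": {"area_id": ["kitchen", "hallway"]},
        },
        {
            "condition": "siren.is_off",
            "target": {"area_id": "garage", "label_id": "security"},
        },
    ],
}

# async_extract_areas(example_condition)  -> {"kitchen", "hallway", "garage"}
# async_extract_labels(example_condition) -> {"security"}
# async_extract_floors(example_condition) -> set()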
@@ -17,6 +17,9 @@ from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.loader import bind_hass
 from homeassistant.util.hass_dict import HassKey
 from homeassistant.util.ssl import (
+    SSL_ALPN_HTTP11,
+    SSL_ALPN_HTTP11_HTTP2,
+    SSLALPNProtocols,
     SSLCipherList,
     client_context,
     create_no_verify_ssl_context,
@@ -28,9 +31,9 @@ from .frame import warn_use
 # and we want to keep the connection open for a while so we
 # don't have to reconnect every time so we use 15s to match aiohttp.
 KEEP_ALIVE_TIMEOUT = 15
-DATA_ASYNC_CLIENT: HassKey[httpx.AsyncClient] = HassKey("httpx_async_client")
-DATA_ASYNC_CLIENT_NOVERIFY: HassKey[httpx.AsyncClient] = HassKey(
-    "httpx_async_client_noverify"
+# Shared httpx clients keyed by (verify_ssl, alpn_protocols)
+DATA_ASYNC_CLIENT: HassKey[dict[tuple[bool, SSLALPNProtocols], httpx.AsyncClient]] = (
+    HassKey("httpx_async_client")
 )
 DEFAULT_LIMITS = limits = httpx.Limits(keepalive_expiry=KEEP_ALIVE_TIMEOUT)
 SERVER_SOFTWARE = (
@@ -42,15 +45,26 @@ USER_AGENT = "User-Agent"

 @callback
 @bind_hass
-def get_async_client(hass: HomeAssistant, verify_ssl: bool = True) -> httpx.AsyncClient:
+def get_async_client(
+    hass: HomeAssistant,
+    verify_ssl: bool = True,
+    alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
+) -> httpx.AsyncClient:
     """Return default httpx AsyncClient.

     This method must be run in the event loop.
-    """
-    key = DATA_ASYNC_CLIENT if verify_ssl else DATA_ASYNC_CLIENT_NOVERIFY
-
-    if (client := hass.data.get(key)) is None:
-        client = hass.data[key] = create_async_httpx_client(hass, verify_ssl)
+
+    Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 to get a client configured for HTTP/2.
+    Clients are cached separately by ALPN protocol to ensure proper SSL context
+    configuration (ALPN protocols differ between HTTP versions).
+    """
+    client_key = (verify_ssl, alpn_protocols)
+    clients = hass.data.setdefault(DATA_ASYNC_CLIENT, {})
+
+    if (client := clients.get(client_key)) is None:
+        client = clients[client_key] = create_async_httpx_client(
+            hass, verify_ssl, alpn_protocols=alpn_protocols
+        )

     return client
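With the keyed cache above, integrations opt into HTTP/2 simply by passing the new argument; the iaqualink changes earlier in this diff do exactly this. A small usage sketch:

from homeassistant.core import HomeAssistant
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2


def _example(hass: HomeAssistant) -> None:
    # Default: HTTP/1.1-only client (SSL_ALPN_HTTP11), same behavior as before.
    http1_client = get_async_client(hass)
    # Opt-in HTTP/2 client; repeated calls with the same arguments return the
    # same cached instance.
    http2_client = get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
    assert http2_client is get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
    assert http1_client is not http2_client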
@@ -77,6 +91,7 @@ def create_async_httpx_client(
     verify_ssl: bool = True,
     auto_cleanup: bool = True,
     ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
+    alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
     **kwargs: Any,
 ) -> httpx.AsyncClient:
     """Create a new httpx.AsyncClient with kwargs, i.e. for cookies.
@@ -84,13 +99,22 @@
     If auto_cleanup is False, the client will be
     automatically closed on homeassistant_stop.

+    Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 for HTTP/2 support (automatically
+    enables httpx http2 mode).
+
     This method must be run in the event loop.
     """
+    # Use the requested ALPN protocols directly to ensure proper SSL context
+    # bucketing. httpx/httpcore mutates SSL contexts by calling set_alpn_protocols(),
+    # so we pre-set the correct protocols to prevent shared context corruption.
     ssl_context = (
-        client_context(ssl_cipher_list)
+        client_context(ssl_cipher_list, alpn_protocols)
         if verify_ssl
-        else create_no_verify_ssl_context(ssl_cipher_list)
+        else create_no_verify_ssl_context(ssl_cipher_list, alpn_protocols)
     )
+    # Enable httpx HTTP/2 mode when HTTP/2 protocol is requested
+    if alpn_protocols == SSL_ALPN_HTTP11_HTTP2:
+        kwargs.setdefault("http2", True)
     client = HassHttpXAsyncClient(
         verify=ssl_context,
         headers={USER_AGENT: SERVER_SOFTWARE},
|
||||
_above: None | float | str
|
||||
_below: None | float | str
|
||||
|
||||
_converter: Callable[[Any], float] = float
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the state trigger."""
|
||||
super().__init__(hass, config)
|
||||
@@ -616,7 +618,7 @@ class EntityNumericalStateAttributeChangedTriggerBase(EntityTriggerBase):
|
||||
return False
|
||||
|
||||
try:
|
||||
current_value = float(_attribute_value)
|
||||
current_value = self._converter(_attribute_value)
|
||||
except (TypeError, ValueError):
|
||||
# Attribute is not a valid number, don't trigger
|
||||
return False
|
||||
@@ -706,6 +708,8 @@ class EntityNumericalStateAttributeCrossedThresholdTriggerBase(EntityTriggerBase
|
||||
_upper_limit: float | str | None = None
|
||||
_threshold_type: ThresholdType
|
||||
|
||||
_converter: Callable[[Any], float] = float
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the state trigger."""
|
||||
super().__init__(hass, config)
|
||||
@@ -741,7 +745,7 @@ class EntityNumericalStateAttributeCrossedThresholdTriggerBase(EntityTriggerBase
|
||||
return False
|
||||
|
||||
try:
|
||||
current_value = float(_attribute_value)
|
||||
current_value = self._converter(_attribute_value)
|
||||
except (TypeError, ValueError):
|
||||
# Attribute is not a valid number, don't trigger
|
||||
return False
|
||||
|
||||
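The _converter hook added to both numerical-attribute trigger bases is generic: any subclass can substitute its own unit conversion, with float remaining the default. A hedged sketch using a hypothetical attribute (only light brightness is actually wired up in this diff):

from homeassistant.helpers.trigger import (
    EntityNumericalStateAttributeChangedTriggerBase,
)


def _convert_mired_to_kelvin(value) -> float:
    """Hypothetical converter: mireds to Kelvin."""
    return 1_000_000 / float(value)


class ColorTempChangedTrigger(EntityNumericalStateAttributeChangedTriggerBase):
    """Hypothetical trigger reporting color temperature in Kelvin."""

    _domain = "light"
    _attribute = "color_temp"
    _converter = staticmethod(_convert_mired_to_kelvin)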
@@ -36,7 +36,7 @@ fnv-hash-fast==1.6.0
|
||||
go2rtc-client==0.4.0
|
||||
ha-ffmpeg==3.2.2
|
||||
habluetooth==5.8.0
|
||||
hass-nabucasa==1.9.0
|
||||
hass-nabucasa==1.11.0
|
||||
hassil==3.5.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
home-assistant-frontend==20260107.2
|
||||
|
||||
@@ -85,6 +85,9 @@
|
||||
"timeout_connect": "Timeout establishing connection",
|
||||
"unknown": "Unexpected error"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"account": "Add account"
|
||||
},
|
||||
"title": {
|
||||
"oauth2_pick_implementation": "Pick authentication method",
|
||||
"reauth": "Authentication expired for {name}",
|
||||
|
@@ -8,6 +8,17 @@ import ssl

import certifi

# Type alias for ALPN protocols tuple (None means no ALPN protocols set)
type SSLALPNProtocols = tuple[str, ...] | None

# ALPN protocol configurations
# No ALPN protocols - used for libraries that don't support/need ALPN (e.g., aioimap)
SSL_ALPN_NONE: SSLALPNProtocols = None
# HTTP/1.1 only - used by default and for aiohttp (which doesn't support HTTP/2)
SSL_ALPN_HTTP11: SSLALPNProtocols = ("http/1.1",)
# HTTP/1.1 with HTTP/2 support - used when httpx http2=True
SSL_ALPN_HTTP11_HTTP2: SSLALPNProtocols = ("http/1.1", "h2")


class SSLCipherList(StrEnum):
    """SSL cipher lists."""
@@ -64,7 +75,10 @@ SSL_CIPHER_LISTS = {


@cache
def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
def _client_context_no_verify(
    ssl_cipher_list: SSLCipherList,
    alpn_protocols: SSLALPNProtocols,
) -> ssl.SSLContext:
    # This is a copy of aiohttp's create_default_context() function, with the
    # ssl verify turned off.
    # https://github.com/aio-libs/aiohttp/blob/33953f110e97eecc707e1402daa8d543f38a189b/aiohttp/connector.py#L911
@@ -78,12 +92,18 @@ def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
    sslcontext.set_default_verify_paths()
    if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
        sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
    # Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
    # from mutating the shared SSL context with different protocol settings.
    # If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
    if alpn_protocols is not None:
        sslcontext.set_alpn_protocols(list(alpn_protocols))

    return sslcontext


def _create_client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an independent SSL context for making requests."""
    # Reuse environment variable definition from requests, since it's already a
@@ -96,6 +116,11 @@ def _create_client_context(
    )
    if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
        sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
    # Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
    # from mutating the shared SSL context with different protocol settings.
    # If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
    if alpn_protocols is not None:
        sslcontext.set_alpn_protocols(list(alpn_protocols))

    return sslcontext

@@ -103,63 +128,63 @@ def _create_client_context(
@cache
def _client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    # Cached version of _create_client_context
    return _create_client_context(ssl_cipher_list)
    return _create_client_context(ssl_cipher_list, alpn_protocols)


# Create this only once and reuse it
_DEFAULT_SSL_CONTEXT = _client_context(SSLCipherList.PYTHON_DEFAULT)
_DEFAULT_NO_VERIFY_SSL_CONTEXT = _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT)
_NO_VERIFY_SSL_CONTEXTS = {
    SSLCipherList.INTERMEDIATE: _client_context_no_verify(SSLCipherList.INTERMEDIATE),
    SSLCipherList.MODERN: _client_context_no_verify(SSLCipherList.MODERN),
    SSLCipherList.INSECURE: _client_context_no_verify(SSLCipherList.INSECURE),
}
_SSL_CONTEXTS = {
    SSLCipherList.INTERMEDIATE: _client_context(SSLCipherList.INTERMEDIATE),
    SSLCipherList.MODERN: _client_context(SSLCipherList.MODERN),
    SSLCipherList.INSECURE: _client_context(SSLCipherList.INSECURE),
}
# Pre-warm the cache for ALL SSL context configurations at module load time.
# This is critical because creating SSL contexts loads certificates from disk,
# which is blocking I/O that must not happen in the event loop.
_SSL_ALPN_PROTOCOLS = (SSL_ALPN_NONE, SSL_ALPN_HTTP11, SSL_ALPN_HTTP11_HTTP2)
for _cipher in SSLCipherList:
    for _alpn in _SSL_ALPN_PROTOCOLS:
        _client_context(_cipher, _alpn)
        _client_context_no_verify(_cipher, _alpn)


def get_default_context() -> ssl.SSLContext:
    """Return the default SSL context."""
    return _DEFAULT_SSL_CONTEXT
    return _client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)


def get_default_no_verify_context() -> ssl.SSLContext:
    """Return the default SSL context that does not verify the server certificate."""
    return _DEFAULT_NO_VERIFY_SSL_CONTEXT
    return _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)


def client_context_no_verify(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return a SSL context with no verification with a specific ssl cipher."""
    return _NO_VERIFY_SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_NO_VERIFY_SSL_CONTEXT)
    return _client_context_no_verify(ssl_cipher_list, alpn_protocols)


def client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an SSL context for making requests."""
    return _SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_SSL_CONTEXT)
    return _client_context(ssl_cipher_list, alpn_protocols)


def create_client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an independent SSL context for making requests."""
    # This explicitly uses the non-cached version to create a client context
    return _create_client_context(ssl_cipher_list)
    return _create_client_context(ssl_cipher_list, alpn_protocols)


def create_no_verify_ssl_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an SSL context that does not verify the server certificate."""
    return _client_context_no_verify(ssl_cipher_list)
    return _client_context_no_verify(ssl_cipher_list, alpn_protocols)


def server_context_modern() -> ssl.SSLContext:
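The hunks above thread an `alpn_protocols` argument through every SSL-context helper so that each client library gets a context with the ALPN list it needs, instead of mutating a shared context after the fact. A minimal sketch of how callers might pick a profile per library; the helper and constant names come from the diff, while the import path (`homeassistant.util.ssl`) is an assumption here:

```python
import httpx

from homeassistant.util.ssl import (
    SSL_ALPN_HTTP11,
    SSL_ALPN_HTTP11_HTTP2,
    SSL_ALPN_NONE,
    SSLCipherList,
    client_context,
)

# aiohttp speaks HTTP/1.1 only, so its shared context advertises just that.
aiohttp_ctx = client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)

# httpx with http2=True needs "h2" in the ALPN list; requesting it up front means
# httpx/httpcore no longer has to change a shared context behind the caller's back.
httpx_client = httpx.Client(
    http2=True,
    verify=client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11_HTTP2),
)

# Libraries that negotiate their own protocol (e.g. IMAP clients) get a context
# with no ALPN set at all.
imap_ctx = client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_NONE)
```

Because the private builders are cached and pre-warmed at import time, each (cipher list, ALPN) pair maps to one shared context and no certificate loading happens later in the event loop.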
@@ -103,6 +103,7 @@ _AMBIENT_IDEAL_GAS_MOLAR_VOLUME = ( # m3⋅mol⁻¹
)
# Molar masses in g⋅mol⁻¹
_CARBON_MONOXIDE_MOLAR_MASS = 28.01
_SULPHUR_DIOXIDE_MOLAR_MASS = 64.066


class BaseUnitConverter:
@@ -193,6 +194,7 @@ class CarbonMonoxideConcentrationConverter(BaseUnitConverter):

    UNIT_CLASS = "carbon_monoxide"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_BILLION: 1e9,
        CONCENTRATION_PARTS_PER_MILLION: 1e6,
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: (
            _CARBON_MONOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e3
@@ -202,12 +204,29 @@ class CarbonMonoxideConcentrationConverter(BaseUnitConverter):
        ),
    }
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_PARTS_PER_MILLION,
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    }


class SulphurDioxideConcentrationConverter(BaseUnitConverter):
    """Convert sulphur dioxide ratio to mass per volume."""

    UNIT_CLASS = "sulphur_dioxide"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_BILLION: 1e9,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
            _SULPHUR_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
        ),
    }
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    }


class DataRateConverter(BaseUnitConverter):
    """Utility to convert data rate values."""
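The converters above map each unit to a factor relative to a pure mixing ratio: ppb is 1e9, ppm is 1e6, and the mass-per-volume units use molar mass divided by the ambient molar volume. A small worked example of that arithmetic; the molar volume value (about 0.02445 m³/mol at roughly 25 °C and 1 atm) is an assumption here, since the diff only shows that `_AMBIENT_IDEAL_GAS_MOLAR_VOLUME` is defined elsewhere in the module:

```python
AMBIENT_MOLAR_VOLUME = 0.02445  # m3/mol, assumed ambient conditions
CO_MOLAR_MASS = 28.01  # g/mol, from the diff
SO2_MOLAR_MASS = 64.066  # g/mol, from the diff


def co_ppm_to_mg_per_m3(ppm: float) -> float:
    """Convert a CO mixing ratio in ppm to mg/m3."""
    mole_fraction = ppm / 1e6
    return mole_fraction * CO_MOLAR_MASS / AMBIENT_MOLAR_VOLUME * 1e3


def so2_ppb_to_ug_per_m3(ppb: float) -> float:
    """Convert an SO2 mixing ratio in ppb to ug/m3."""
    mole_fraction = ppb / 1e9
    return mole_fraction * SO2_MOLAR_MASS / AMBIENT_MOLAR_VOLUME * 1e6


# At ambient conditions, 1 ppm CO is roughly 1.15 mg/m3 and 1 ppb SO2 is roughly 2.6 ug/m3.
assert round(co_ppm_to_mg_per_m3(1), 2) == 1.15
assert round(so2_ppb_to_ug_per_m3(1), 1) == 2.6
```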
@@ -168,7 +168,6 @@ _TEST_FIXTURES: dict[str, list[str] | str] = {
    "service_calls": "list[ServiceCall]",
    "snapshot": "SnapshotAssertion",
    "socket_enabled": "None",
    "stub_blueprint_populate": "None",
    "tmp_path": "Path",
    "tmpdir": "py.path.local",
    "tts_mutagen_mock": "MagicMock",
@@ -48,7 +48,7 @@ dependencies = [
    "fnv-hash-fast==1.6.0",
    # hass-nabucasa is imported by helpers which don't depend on the cloud
    # integration
    "hass-nabucasa==1.9.0",
    "hass-nabucasa==1.11.0",
    # When bumping httpx, please check the version pins of
    # httpcore, anyio, and h11 in gen_requirements_all
    "httpx==0.28.1",
requirements.txt (generated)
@@ -24,7 +24,7 @@ cronsim==2.7
cryptography==46.0.2
fnv-hash-fast==1.6.0
ha-ffmpeg==3.2.2
hass-nabucasa==1.9.0
hass-nabucasa==1.11.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-intents==2026.1.6
requirements_all.txt (generated)
@@ -1171,7 +1171,7 @@ habluetooth==5.8.0
hanna-cloud==0.0.7

# homeassistant.components.cloud
hass-nabucasa==1.9.0
hass-nabucasa==1.11.0

# homeassistant.components.splunk
hass-splunk==0.1.1
@@ -1646,7 +1646,7 @@ omnilogic==0.4.5
ondilo==0.5.0

# homeassistant.components.onedrive
onedrive-personal-sdk==0.1.0
onedrive-personal-sdk==0.1.1

# homeassistant.components.onvif
onvif-zeep-async==4.0.4
@@ -2045,7 +2045,7 @@ pyfibaro==0.8.3
pyfido==2.1.2

# homeassistant.components.firefly_iii
pyfirefly==0.1.11
pyfirefly==0.1.12

# homeassistant.components.fireservicerota
pyfireservicerota==0.0.46
@@ -2147,7 +2147,7 @@ pykmtronic==0.3.0
pykodi==0.2.7

# homeassistant.components.kostal_plenticore
pykoplenti==1.3.0
pykoplenti==1.5.0

# homeassistant.components.kraken
pykrakenapi==0.1.8
@@ -2377,7 +2377,7 @@ pysabnzbd==1.1.1
pysaj==0.0.16

# homeassistant.components.saunum
pysaunum==0.2.0
pysaunum==0.3.0

# homeassistant.components.schlage
pyschlage==2025.9.0
@@ -3170,7 +3170,7 @@ wallbox==0.9.0
watchdog==6.0.0

# homeassistant.components.waterfurnace
waterfurnace==1.2.0
waterfurnace==1.4.0

# homeassistant.components.watergate
watergate-local-api==2025.1.0
requirements_test_all.txt (generated)
@@ -1041,7 +1041,7 @@ habluetooth==5.8.0
hanna-cloud==0.0.7

# homeassistant.components.cloud
hass-nabucasa==1.9.0
hass-nabucasa==1.11.0

# homeassistant.components.assist_satellite
# homeassistant.components.conversation
@@ -1429,7 +1429,7 @@ omnilogic==0.4.5
ondilo==0.5.0

# homeassistant.components.onedrive
onedrive-personal-sdk==0.1.0
onedrive-personal-sdk==0.1.1

# homeassistant.components.onvif
onvif-zeep-async==4.0.4
@@ -1737,7 +1737,7 @@ pyfibaro==0.8.3
pyfido==2.1.2

# homeassistant.components.firefly_iii
pyfirefly==0.1.11
pyfirefly==0.1.12

# homeassistant.components.fireservicerota
pyfireservicerota==0.0.46
@@ -1821,7 +1821,7 @@ pykmtronic==0.3.0
pykodi==0.2.7

# homeassistant.components.kostal_plenticore
pykoplenti==1.3.0
pykoplenti==1.5.0

# homeassistant.components.kraken
pykrakenapi==0.1.8
@@ -2012,7 +2012,7 @@ pyrympro==0.0.9
pysabnzbd==1.1.1

# homeassistant.components.saunum
pysaunum==0.2.0
pysaunum==0.3.0

# homeassistant.components.schlage
pyschlage==2025.9.0
@@ -214,6 +214,10 @@ def gen_data_entry_schema(
            vol.Required("user"): translation_value_validator,
            str: translation_value_validator,
        }
    else:
        schema[vol.Optional("initiate_flow")] = {
            vol.Required("user"): translation_value_validator,
        }
    if flow_title == REQUIRED:
        schema[vol.Required("title")] = translation_value_validator
    elif flow_title == REMOVED:
@@ -100,6 +100,13 @@ async def target_entities(
        suggested_object_id=f"device_{domain}",
        device_id=device.id,
    )
    entity_reg.async_get_or_create(
        domain=domain,
        platform="test",
        unique_id=f"{domain}_device2",
        suggested_object_id=f"device2_{domain}",
        device_id=device.id,
    )
    entity_reg.async_get_or_create(
        domain=domain,
        platform="test",
@@ -130,9 +137,11 @@ async def target_entities(
    return {
        "included": [
            f"{domain}.standalone_{domain}",
            f"{domain}.standalone2_{domain}",
            f"{domain}.label_{domain}",
            f"{domain}.area_{domain}",
            f"{domain}.device_{domain}",
            f"{domain}.device2_{domain}",
        ],
        "excluded": [
            f"{domain}.standalone_{domain}_excluded",
@@ -150,17 +159,22 @@ def parametrize_target_entities(domain: str) -> list[tuple[dict, str, int]]:
    """
    return [
        (
            {CONF_ENTITY_ID: f"{domain}.standalone_{domain}"},
            {
                CONF_ENTITY_ID: [
                    f"{domain}.standalone_{domain}",
                    f"{domain}.standalone2_{domain}",
                ]
            },
            f"{domain}.standalone_{domain}",
            1,
            2,
        ),
        ({ATTR_LABEL_ID: "test_label"}, f"{domain}.label_{domain}", 2),
        ({ATTR_AREA_ID: "test_area"}, f"{domain}.area_{domain}", 2),
        ({ATTR_FLOOR_ID: "test_floor"}, f"{domain}.area_{domain}", 2),
        ({ATTR_LABEL_ID: "test_label"}, f"{domain}.device_{domain}", 2),
        ({ATTR_AREA_ID: "test_area"}, f"{domain}.device_{domain}", 2),
        ({ATTR_FLOOR_ID: "test_floor"}, f"{domain}.device_{domain}", 2),
        ({ATTR_DEVICE_ID: "test_device"}, f"{domain}.device_{domain}", 1),
        ({ATTR_LABEL_ID: "test_label"}, f"{domain}.label_{domain}", 3),
        ({ATTR_AREA_ID: "test_area"}, f"{domain}.area_{domain}", 3),
        ({ATTR_FLOOR_ID: "test_floor"}, f"{domain}.area_{domain}", 3),
        ({ATTR_LABEL_ID: "test_label"}, f"{domain}.device_{domain}", 3),
        ({ATTR_AREA_ID: "test_area"}, f"{domain}.device_{domain}", 3),
        ({ATTR_FLOOR_ID: "test_floor"}, f"{domain}.device_{domain}", 3),
        ({ATTR_DEVICE_ID: "test_device"}, f"{domain}.device_{domain}", 2),
    ]


@@ -184,18 +198,19 @@ class ConditionStateDescription(TypedDict):

    included: _StateDescription  # State for entities meant to be targeted
    excluded: _StateDescription  # State for entities not meant to be targeted
    state_valid: bool  # False if the state of the included entities is missing (None), unavailable or unknown

    condition_true: bool  # If the condition is expected to evaluate to true
    condition_true_first_entity: bool  # If the condition is expected to evaluate to true for the first targeted entity


def parametrize_condition_states(
def _parametrize_condition_states(
    *,
    condition: str,
    condition_options: dict[str, Any] | None = None,
    target_states: list[str | None | tuple[str | None, dict]],
    other_states: list[str | None | tuple[str | None, dict]],
    additional_attributes: dict | None = None,
    additional_attributes: dict | None,
    condition_true_if_invalid: bool,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
    """Parametrize states and expected condition evaluations.

@@ -212,7 +227,7 @@ def parametrize_condition_states(
    def state_with_attributes(
        state: str | None | tuple[str | None, dict],
        condition_true: bool,
        state_valid: bool,
        condition_true_first_entity: bool,
    ) -> ConditionStateDescription:
        """Return ConditionStateDescription dict."""
        if isinstance(state, str) or state is None:
@@ -226,7 +241,7 @@ def parametrize_condition_states(
                    "attributes": {},
                },
                "condition_true": condition_true,
                "state_valid": state_valid,
                "condition_true_first_entity": condition_true_first_entity,
            }
        return {
            "included": {
@@ -238,7 +253,7 @@ def parametrize_condition_states(
                "attributes": state[1],
            },
            "condition_true": condition_true,
            "state_valid": state_valid,
            "condition_true_first_entity": condition_true_first_entity,
        }

    return [
@@ -247,11 +262,19 @@ def parametrize_condition_states(
            condition_options,
            list(
                itertools.chain(
                    (state_with_attributes(None, False, False),),
                    (state_with_attributes(STATE_UNAVAILABLE, False, False),),
                    (state_with_attributes(STATE_UNKNOWN, False, False),),
                    (state_with_attributes(None, condition_true_if_invalid, True),),
                    (
                        state_with_attributes(other_state, False, True)
                        state_with_attributes(
                            STATE_UNAVAILABLE, condition_true_if_invalid, True
                        ),
                    ),
                    (
                        state_with_attributes(
                            STATE_UNKNOWN, condition_true_if_invalid, True
                        ),
                    ),
                    (
                        state_with_attributes(other_state, False, False)
                        for other_state in other_states
                    ),
                ),
@@ -263,8 +286,8 @@ def parametrize_condition_states(
            condition,
            condition_options,
            [
                state_with_attributes(other_states[0], False, True),
                state_with_attributes(target_state, True, True),
                state_with_attributes(other_states[0], False, False),
                state_with_attributes(target_state, True, False),
            ],
        )
        for target_state in target_states
@@ -272,6 +295,60 @@ def parametrize_condition_states(
    ]


def parametrize_condition_states_any(
    *,
    condition: str,
    condition_options: dict[str, Any] | None = None,
    target_states: list[str | None | tuple[str | None, dict]],
    other_states: list[str | None | tuple[str | None, dict]],
    additional_attributes: dict | None = None,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
    """Parametrize states and expected condition evaluations.

    The target_states and other_states iterables are either iterables of
    states or iterables of (state, attributes) tuples.

    Returns a list of tuples with (condition, condition options, list of states),
    where states is a list of ConditionStateDescription dicts.
    """

    return _parametrize_condition_states(
        condition=condition,
        condition_options=condition_options,
        target_states=target_states,
        other_states=other_states,
        additional_attributes=additional_attributes,
        condition_true_if_invalid=False,
    )


def parametrize_condition_states_all(
    *,
    condition: str,
    condition_options: dict[str, Any] | None = None,
    target_states: list[str | None | tuple[str | None, dict]],
    other_states: list[str | None | tuple[str | None, dict]],
    additional_attributes: dict | None = None,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
    """Parametrize states and expected condition evaluations.

    The target_states and other_states iterables are either iterables of
    states or iterables of (state, attributes) tuples.

    Returns a list of tuples with (condition, condition options, list of states),
    where states is a list of ConditionStateDescription dicts.
    """

    return _parametrize_condition_states(
        condition=condition,
        condition_options=condition_options,
        target_states=target_states,
        other_states=other_states,
        additional_attributes=additional_attributes,
        condition_true_if_invalid=True,
    )


def parametrize_trigger_states(
    *,
    trigger: str,
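The hunks above split the old `parametrize_condition_states` helper into a private `_parametrize_condition_states` plus two public wrappers that only differ in how missing, unavailable, or unknown targeted entities are expected to evaluate. A minimal sketch of how a component test might consume both wrappers; the helper names and import path come from the diff, while the `siren.is_on` condition and the states used are illustrative assumptions:

```python
import pytest

from tests.components import (
    parametrize_condition_states_all,
    parametrize_condition_states_any,
)


@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [
        # "any" behavior: an invalid targeted entity never satisfies the
        # condition (condition_true_if_invalid=False under the hood).
        *parametrize_condition_states_any(
            condition="siren.is_on",
            target_states=["on"],
            other_states=["off"],
        ),
        # "all" behavior: invalid entities do not break the condition as long
        # as the remaining targeted entities match (condition_true_if_invalid=True).
        *parametrize_condition_states_all(
            condition="siren.is_on",
            target_states=["on"],
            other_states=["off"],
        ),
    ],
)
async def test_siren_is_on_condition(condition, condition_options, states) -> None:
    """Illustrative test body; the real assertions follow the pattern shown further below."""
```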
@@ -16,18 +16,14 @@ from tests.components import (
    assert_condition_gated_by_labs_flag,
    create_target_condition,
    other_states,
    parametrize_condition_states,
    parametrize_condition_states_all,
    parametrize_condition_states_any,
    parametrize_target_entities,
    set_or_remove_state,
    target_entities,
)


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


@pytest.fixture
async def target_alarm_control_panels(hass: HomeAssistant) -> list[str]:
    """Create multiple alarm_control_panel entities associated with different targets."""
@@ -61,7 +57,7 @@ async def test_alarm_control_panel_conditions_gated_by_labs_flag(
@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="alarm_control_panel.is_armed",
            target_states=[
                AlarmControlPanelState.ARMED_AWAY,
@@ -78,7 +74,7 @@ async def test_alarm_control_panel_conditions_gated_by_labs_flag(
                AlarmControlPanelState.TRIGGERED,
            ],
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="alarm_control_panel.is_armed_away",
            target_states=[AlarmControlPanelState.ARMED_AWAY],
            other_states=other_states(AlarmControlPanelState.ARMED_AWAY),
@@ -86,7 +82,7 @@ async def test_alarm_control_panel_conditions_gated_by_labs_flag(
                ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_AWAY
            },
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="alarm_control_panel.is_armed_home",
            target_states=[AlarmControlPanelState.ARMED_HOME],
            other_states=other_states(AlarmControlPanelState.ARMED_HOME),
@@ -94,7 +90,7 @@ async def test_alarm_control_panel_conditions_gated_by_labs_flag(
                ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_HOME
            },
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="alarm_control_panel.is_armed_night",
            target_states=[AlarmControlPanelState.ARMED_NIGHT],
            other_states=other_states(AlarmControlPanelState.ARMED_NIGHT),
@@ -102,7 +98,7 @@ async def test_alarm_control_panel_conditions_gated_by_labs_flag(
                ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_NIGHT
            },
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="alarm_control_panel.is_armed_vacation",
            target_states=[AlarmControlPanelState.ARMED_VACATION],
            other_states=other_states(AlarmControlPanelState.ARMED_VACATION),
@@ -110,12 +106,12 @@ async def test_alarm_control_panel_conditions_gated_by_labs_flag(
                ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_VACATION
            },
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="alarm_control_panel.is_disarmed",
            target_states=[AlarmControlPanelState.DISARMED],
            other_states=other_states(AlarmControlPanelState.DISARMED),
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="alarm_control_panel.is_triggered",
            target_states=[AlarmControlPanelState.TRIGGERED],
            other_states=other_states(AlarmControlPanelState.TRIGGERED),
@@ -168,7 +164,7 @@ async def test_alarm_control_panel_state_condition_behavior_any(
@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="alarm_control_panel.is_armed",
            target_states=[
                AlarmControlPanelState.ARMED_AWAY,
@@ -185,7 +181,7 @@ async def test_alarm_control_panel_state_condition_behavior_any(
                AlarmControlPanelState.TRIGGERED,
            ],
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="alarm_control_panel.is_armed_away",
            target_states=[AlarmControlPanelState.ARMED_AWAY],
            other_states=other_states(AlarmControlPanelState.ARMED_AWAY),
@@ -193,7 +189,7 @@ async def test_alarm_control_panel_state_condition_behavior_any(
                ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_AWAY
            },
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="alarm_control_panel.is_armed_home",
            target_states=[AlarmControlPanelState.ARMED_HOME],
            other_states=other_states(AlarmControlPanelState.ARMED_HOME),
@@ -201,7 +197,7 @@ async def test_alarm_control_panel_state_condition_behavior_any(
                ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_HOME
            },
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="alarm_control_panel.is_armed_night",
            target_states=[AlarmControlPanelState.ARMED_NIGHT],
            other_states=other_states(AlarmControlPanelState.ARMED_NIGHT),
@@ -209,7 +205,7 @@ async def test_alarm_control_panel_state_condition_behavior_any(
                ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_NIGHT
            },
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="alarm_control_panel.is_armed_vacation",
            target_states=[AlarmControlPanelState.ARMED_VACATION],
            other_states=other_states(AlarmControlPanelState.ARMED_VACATION),
@@ -217,12 +213,12 @@ async def test_alarm_control_panel_state_condition_behavior_any(
                ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_VACATION
            },
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="alarm_control_panel.is_disarmed",
            target_states=[AlarmControlPanelState.DISARMED],
            other_states=other_states(AlarmControlPanelState.DISARMED),
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="alarm_control_panel.is_triggered",
            target_states=[AlarmControlPanelState.TRIGGERED],
            other_states=other_states(AlarmControlPanelState.TRIGGERED),
@@ -259,17 +255,10 @@ async def test_alarm_control_panel_state_condition_behavior_all(

    set_or_remove_state(hass, entity_id, included_state)
    await hass.async_block_till_done()
    # The condition passes if all entities are either in a target state or invalid
    assert condition(hass) == (
        (not state["state_valid"])
        or (state["condition_true"] and entities_in_target == 1)
    )
    assert condition(hass) == state["condition_true_first_entity"]

    for other_entity_id in other_entity_ids:
        set_or_remove_state(hass, other_entity_id, included_state)
        await hass.async_block_till_done()

        # The condition passes if all entities are either in a target state or invalid
        assert condition(hass) == (
            (not state["state_valid"]) or state["condition_true"]
        )
        assert condition(hass) == state["condition_true"]

@@ -25,11 +25,6 @@ from tests.common import (
)


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


@pytest.mark.parametrize(
    ("set_state", "features_reg", "features_state", "expected_action_types"),
    [

@@ -18,11 +18,6 @@ from homeassistant.setup import async_setup_component

from tests.common import MockConfigEntry, async_get_device_automations


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


@pytest.mark.parametrize(
    ("set_state", "features_reg", "features_state", "expected_condition_types"),
    [

@@ -26,11 +26,6 @@ from tests.common import (
)


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


@pytest.mark.parametrize(
    ("set_state", "features_reg", "features_state", "expected_trigger_types"),
    [

@@ -22,11 +22,6 @@ from tests.components import (
)


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


@pytest.fixture
async def target_alarm_control_panels(hass: HomeAssistant) -> list[str]:
    """Create multiple alarm control panel entities associated with different targets."""

@@ -1,7 +1,5 @@
"""The tests for Arcam FMJ Receiver control device triggers."""

import pytest

from homeassistant.components import automation
from homeassistant.components.arcam_fmj.const import DOMAIN
from homeassistant.components.device_automation import DeviceAutomationType
@@ -12,11 +10,6 @@ from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, async_get_device_automations


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


async def test_get_triggers(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,

@@ -12,18 +12,14 @@ from tests.components import (
    assert_condition_gated_by_labs_flag,
    create_target_condition,
    other_states,
    parametrize_condition_states,
    parametrize_condition_states_all,
    parametrize_condition_states_any,
    parametrize_target_entities,
    set_or_remove_state,
    target_entities,
)


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


@pytest.fixture
async def target_assist_satellites(hass: HomeAssistant) -> list[str]:
    """Create multiple assist satellite entities associated with different targets."""
@@ -54,22 +50,22 @@ async def test_assist_satellite_conditions_gated_by_labs_flag(
@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="assist_satellite.is_idle",
            target_states=[AssistSatelliteState.IDLE],
            other_states=other_states(AssistSatelliteState.IDLE),
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="assist_satellite.is_listening",
            target_states=[AssistSatelliteState.LISTENING],
            other_states=other_states(AssistSatelliteState.LISTENING),
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="assist_satellite.is_processing",
            target_states=[AssistSatelliteState.PROCESSING],
            other_states=other_states(AssistSatelliteState.PROCESSING),
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_any(
            condition="assist_satellite.is_responding",
            target_states=[AssistSatelliteState.RESPONDING],
            other_states=other_states(AssistSatelliteState.RESPONDING),
@@ -122,22 +118,22 @@ async def test_assist_satellite_state_condition_behavior_any(
@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="assist_satellite.is_idle",
            target_states=[AssistSatelliteState.IDLE],
            other_states=other_states(AssistSatelliteState.IDLE),
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="assist_satellite.is_listening",
            target_states=[AssistSatelliteState.LISTENING],
            other_states=other_states(AssistSatelliteState.LISTENING),
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="assist_satellite.is_processing",
            target_states=[AssistSatelliteState.PROCESSING],
            other_states=other_states(AssistSatelliteState.PROCESSING),
        ),
        *parametrize_condition_states(
        *parametrize_condition_states_all(
            condition="assist_satellite.is_responding",
            target_states=[AssistSatelliteState.RESPONDING],
            other_states=other_states(AssistSatelliteState.RESPONDING),
@@ -174,17 +170,10 @@ async def test_assist_satellite_state_condition_behavior_all(

    set_or_remove_state(hass, entity_id, included_state)
    await hass.async_block_till_done()
    # The condition passes if all entities are either in a target state or invalid
    assert condition(hass) == (
        (not state["state_valid"])
        or (state["condition_true"] and entities_in_target == 1)
    )
    assert condition(hass) == state["condition_true_first_entity"]

    for other_entity_id in other_entity_ids:
        set_or_remove_state(hass, other_entity_id, included_state)
        await hass.async_block_till_done()

        # The condition passes if all entities are either in a target state or invalid
        assert condition(hass) == (
            (not state["state_valid"]) or state["condition_true"]
        )
        assert condition(hass) == state["condition_true"]

@@ -19,11 +19,6 @@ from tests.components import (
)


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""


@pytest.fixture
async def target_assist_satellites(hass: HomeAssistant) -> list[str]:
    """Create multiple assist satellite entities associated with different targets."""

@@ -1,8 +0,0 @@
"""Conftest for automation tests."""

import pytest


@pytest.fixture(autouse=True, name="stub_blueprint_populate")
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
    """Stub copying the blueprints to the config folder."""
Some files were not shown because too many files have changed in this diff.