Compare commits

..

9 Commits

Author SHA1 Message Date
J. Nick Koston
b6bb157141 http2 checks 2026-01-20 21:46:24 -10:00
J. Nick Koston
d589b9eb8d more flexible for the future 2026-01-20 11:15:25 -10:00
J. Nick Koston
256d47775b better design for future where we live in http3 land 2026-01-20 11:08:25 -10:00
J. Nick Koston
c9eae821e8 Merge remote-tracking branch 'upstream/dev' into ssl_context_mutates 2026-01-20 11:04:06 -10:00
J. Nick Koston
8c02268638 delete brittle tests that were only needed to make sure it works once 2026-01-20 10:44:57 -10:00
J. Nick Koston
8436676e67 Merge branch 'dev' into ssl_context_mutates 2026-01-20 10:42:57 -10:00
J. Nick Koston
df10ffd508 avoid blocking I/O 2026-01-20 10:37:19 -10:00
J. Nick Koston
02218fab7b warm insecure as well 2026-01-20 10:34:10 -10:00
J. Nick Koston
a7cfac2618 Fix SSL context mutation by httpx/httpcore with ALPN protocol bucketing 2026-01-20 08:54:52 -10:00
54 changed files with 852 additions and 1014 deletions

View File

@@ -1187,8 +1187,6 @@ jobs:
- pytest-postgres
- pytest-mariadb
timeout-minutes: 10
permissions:
id-token: write
# codecov/test-results-action currently doesn't support tokenless uploads
# therefore we can't run it on forks
if: |
@@ -1200,9 +1198,8 @@ jobs:
with:
pattern: test-results-*
- name: Upload test results to Codecov
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
with:
report_type: test_results
fail_ci_if_error: true
verbose: true
use_oidc: true
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -56,7 +56,7 @@ from homeassistant.core import (
valid_entity_id,
)
from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError
from homeassistant.helpers import condition as condition_helper, config_validation as cv
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.issue_registry import (
@@ -125,7 +125,6 @@ NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG = "new_triggers_conditions"
_EXPERIMENTAL_CONDITION_PLATFORMS = {
"alarm_control_panel",
"assist_satellite",
"device_tracker",
"fan",
"light",
"siren",
@@ -555,7 +554,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
automation_id: str | None,
name: str,
trigger_config: list[ConfigType],
condition: IfAction | None,
cond_func: IfAction | None,
action_script: Script,
initial_state: bool | None,
variables: ScriptVariables | None,
@@ -568,7 +567,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
self._attr_name = name
self._trigger_config = trigger_config
self._async_detach_triggers: CALLBACK_TYPE | None = None
self._condition = condition
self._cond_func = cond_func
self.action_script = action_script
self.action_script.change_listener = self.async_write_ha_state
self._initial_state = initial_state
@@ -603,11 +602,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced labels."""
referenced = self.action_script.referenced_labels
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_targets(
conf, ATTR_LABEL_ID
)
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_labels(conf)
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
@@ -618,11 +615,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced floors."""
referenced = self.action_script.referenced_floors
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_targets(
conf, ATTR_FLOOR_ID
)
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_floors(conf)
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
@@ -633,9 +628,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced areas."""
referenced = self.action_script.referenced_areas
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_targets(conf, ATTR_AREA_ID)
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_areas(conf)
for conf in self._trigger_config:
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
@@ -653,9 +648,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced devices."""
referenced = self.action_script.referenced_devices
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_devices(conf)
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_devices(conf)
for conf in self._trigger_config:
referenced |= set(_trigger_extract_devices(conf))
@@ -667,9 +662,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Return a set of referenced entities."""
referenced = self.action_script.referenced_entities
if self._condition is not None:
for conf in self._condition.config:
referenced |= condition_helper.async_extract_entities(conf)
if self._cond_func is not None:
for conf in self._cond_func.config:
referenced |= condition.async_extract_entities(conf)
for conf in self._trigger_config:
for entity_id in _trigger_extract_entities(conf):
@@ -789,8 +784,8 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
if (
not skip_condition
and self._condition is not None
and not self._condition(variables)
and self._cond_func is not None
and not self._cond_func(variables)
):
self._logger.debug(
"Conditions not met, aborting automation. Condition summary: %s",
@@ -1052,12 +1047,12 @@ async def _create_automation_entities(
)
if CONF_CONDITIONS in config_block:
condition = await _async_process_if(hass, name, config_block)
cond_func = await _async_process_if(hass, name, config_block)
if condition is None:
if cond_func is None:
continue
else:
condition = None
cond_func = None
# Add trigger variables to variables
variables = None
@@ -1075,7 +1070,7 @@ async def _create_automation_entities(
automation_id,
name,
config_block[CONF_TRIGGERS],
condition,
cond_func,
action_script,
initial_state,
variables,
@@ -1217,7 +1212,7 @@ async def _async_process_if(
if_configs = config[CONF_CONDITIONS]
try:
if_action = await condition_helper.async_conditions_from_config(
if_action = await condition.async_conditions_from_config(
hass, if_configs, LOGGER, name
)
except HomeAssistantError as ex:

View File

@@ -1,17 +0,0 @@
"""Provides conditions for device trackers."""
from homeassistant.const import STATE_HOME, STATE_NOT_HOME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition
from .const import DOMAIN
CONDITIONS: dict[str, type[Condition]] = {
"is_home": make_entity_state_condition(DOMAIN, STATE_HOME),
"is_not_home": make_entity_state_condition(DOMAIN, STATE_NOT_HOME),
}
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
"""Return the conditions for device trackers."""
return CONDITIONS

View File

@@ -1,17 +0,0 @@
.condition_common: &condition_common
target:
entity:
domain: device_tracker
fields:
behavior:
required: true
default: any
selector:
select:
options:
- all
- any
translation_key: condition_behavior
is_home: *condition_common
is_not_home: *condition_common

View File

@@ -1,12 +1,4 @@
{
"conditions": {
"is_home": {
"condition": "mdi:account"
},
"is_not_home": {
"condition": "mdi:account-arrow-right"
}
},
"entity_component": {
"_": {
"default": "mdi:account",

View File

@@ -1,32 +1,8 @@
{
"common": {
"condition_behavior_description": "How the state should match on the targeted device trackers.",
"condition_behavior_name": "Behavior",
"trigger_behavior_description": "The behavior of the targeted device trackers to trigger on.",
"trigger_behavior_name": "Behavior"
},
"conditions": {
"is_home": {
"description": "Tests if one or more device trackers are home.",
"fields": {
"behavior": {
"description": "[%key:component::device_tracker::common::condition_behavior_description%]",
"name": "[%key:component::device_tracker::common::condition_behavior_name%]"
}
},
"name": "If a device tracker is home"
},
"is_not_home": {
"description": "Tests if one or more device trackers are not home.",
"fields": {
"behavior": {
"description": "[%key:component::device_tracker::common::condition_behavior_description%]",
"name": "[%key:component::device_tracker::common::condition_behavior_name%]"
}
},
"name": "If a device tracker is not home"
}
},
"device_automation": {
"condition_type": {
"is_home": "{entity_name} is home",
@@ -73,12 +49,6 @@
}
},
"selector": {
"condition_behavior": {
"options": {
"all": "All",
"any": "Any"
}
},
"trigger_behavior": {
"options": {
"any": "Any",

View File

@@ -7,6 +7,9 @@
"benzene": {
"default": "mdi:molecule"
},
"nitrogen_dioxide": {
"default": "mdi:molecule"
},
"nitrogen_monoxide": {
"default": "mdi:molecule"
},
@@ -15,6 +18,9 @@
},
"ozone": {
"default": "mdi:molecule"
},
"sulphur_dioxide": {
"default": "mdi:molecule"
}
}
}

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["google_air_quality_api"],
"quality_scale": "bronze",
"requirements": ["google_air_quality_api==3.0.0"]
"requirements": ["google_air_quality_api==2.1.2"]
}

View File

@@ -13,11 +13,7 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import (
CONCENTRATION_PARTS_PER_MILLION,
CONF_LATITUDE,
CONF_LONGITUDE,
)
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -118,7 +114,6 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
native_unit_of_measurement_fn=lambda x: x.pollutants.co.concentration.units,
exists_fn=lambda x: "co" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.co.concentration.value,
suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
),
AirQualitySensorEntityDescription(
key="nh3",
@@ -146,8 +141,8 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
),
AirQualitySensorEntityDescription(
key="no2",
translation_key="nitrogen_dioxide",
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.NITROGEN_DIOXIDE,
native_unit_of_measurement_fn=lambda x: x.pollutants.no2.concentration.units,
exists_fn=lambda x: "no2" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.no2.concentration.value,
@@ -178,8 +173,8 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
),
AirQualitySensorEntityDescription(
key="so2",
translation_key="sulphur_dioxide",
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.SULPHUR_DIOXIDE,
native_unit_of_measurement_fn=lambda x: x.pollutants.so2.concentration.units,
exists_fn=lambda x: "so2" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.so2.concentration.value,

View File

@@ -205,6 +205,9 @@
"so2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
}
},
"nitrogen_dioxide": {
"name": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]"
},
"nitrogen_monoxide": {
"name": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]"
},
@@ -214,6 +217,9 @@
"ozone": {
"name": "[%key:component::sensor::entity_component::ozone::name%]"
},
"sulphur_dioxide": {
"name": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
},
"uaqi": {
"name": "Universal Air Quality Index"
},

View File

@@ -83,9 +83,6 @@
"invalid_credentials": "Input is incomplete. You must provide either your login details or an API token",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"initiate_flow": {
"user": "[%key:common::config_flow::initiate_flow::account%]"
},
"step": {
"advanced": {
"data": {

View File

@@ -6,7 +6,7 @@ from typing import Any
from aiohttp import web
from homeassistant.components import frontend
from homeassistant.components import frontend, panel_custom
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import ATTR_ICON
from homeassistant.core import HomeAssistant
@@ -33,7 +33,7 @@ async def async_setup_addon_panel(hass: HomeAssistant, hassio: HassIO) -> None:
# _register_panel never suspends and is only
# a coroutine because it would be a breaking change
# to make it a normal function
_register_panel(hass, addon, data)
await _register_panel(hass, addon, data)
class HassIOAddonPanel(HomeAssistantView):
@@ -58,7 +58,7 @@ class HassIOAddonPanel(HomeAssistantView):
data = panels[addon]
# Register panel
_register_panel(self.hass, addon, data)
await _register_panel(self.hass, addon, data)
return web.Response()
async def delete(self, request: web.Request, addon: str) -> web.Response:
@@ -76,14 +76,18 @@ class HassIOAddonPanel(HomeAssistantView):
return {}
def _register_panel(hass: HomeAssistant, addon: str, data: dict[str, Any]):
async def _register_panel(
hass: HomeAssistant, addon: str, data: dict[str, Any]
) -> None:
"""Init coroutine to register the panel."""
frontend.async_register_built_in_panel(
await panel_custom.async_register_panel(
hass,
"app",
frontend_url_path=addon,
webcomponent_name="hassio-main",
sidebar_title=data[ATTR_TITLE],
sidebar_icon=data[ATTR_ICON],
js_url="/api/hassio/app/entrypoint.js",
embed_iframe=True,
require_admin=data[ATTR_ADMIN],
config={"addon": addon},
config={"ingress": addon},
)

View File

@@ -28,6 +28,7 @@ from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
from .const import DOMAIN, UPDATE_INTERVAL
from .entity import AqualinkEntity
@@ -66,7 +67,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AqualinkConfigEntry) ->
username = entry.data[CONF_USERNAME]
password = entry.data[CONF_PASSWORD]
aqualink = AqualinkClient(username, password, httpx_client=get_async_client(hass))
aqualink = AqualinkClient(
username,
password,
httpx_client=get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2),
)
try:
await aqualink.login()
except AqualinkServiceException as login_exception:

View File

@@ -15,6 +15,7 @@ import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
from .const import DOMAIN
@@ -36,7 +37,11 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN):
try:
async with AqualinkClient(
username, password, httpx_client=get_async_client(self.hass)
username,
password,
httpx_client=get_async_client(
self.hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
),
):
pass
except AqualinkServiceUnauthorizedException:

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["imgw_pib==2.0.1"]
"requirements": ["imgw_pib==1.6.0"]
}

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_push",
"loggers": ["aionfty"],
"quality_scale": "platinum",
"requirements": ["aiontfy==0.7.0"]
"requirements": ["aiontfy==0.6.1"]
}

View File

@@ -247,7 +247,7 @@ class NumberDeviceClass(StrEnum):
NITROGEN_DIOXIDE = "nitrogen_dioxide"
"""Amount of NO2.
Unit of measurement: `ppb` (parts per billion), `μg/m³`
Unit of measurement: `μg/m³`
"""
NITROGEN_MONOXIDE = "nitrogen_monoxide"
@@ -265,7 +265,7 @@ class NumberDeviceClass(StrEnum):
OZONE = "ozone"
"""Amount of O3.
Unit of measurement: `ppb` (parts per billion), `μg/m³`
Unit of measurement: `μg/m³`
"""
PH = "ph"
@@ -517,16 +517,10 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
NumberDeviceClass.ILLUMINANCE: {LIGHT_LUX},
NumberDeviceClass.IRRADIANCE: set(UnitOfIrradiance),
NumberDeviceClass.MOISTURE: {PERCENTAGE},
NumberDeviceClass.NITROGEN_DIOXIDE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.NITROGEN_DIOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.NITROGEN_MONOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.OZONE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.OZONE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.PH: {None},
NumberDeviceClass.PM1: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},

View File

@@ -25,9 +25,6 @@
"folder_creation_error": "Failed to create folder",
"folder_rename_error": "Failed to rename folder"
},
"initiate_flow": {
"user": "[%key:common::config_flow::initiate_flow::account%]"
},
"step": {
"folder_name": {
"data": {

View File

@@ -4,7 +4,6 @@
"codeowners": ["@konikvranik", "@allenporter"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rainbird",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyrainbird"],
"requirements": ["pyrainbird==6.0.1"]

View File

@@ -59,8 +59,6 @@ from homeassistant.util.unit_conversion import (
InformationConverter,
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
ReactiveEnergyConverter,
@@ -227,10 +225,8 @@ _PRIMARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
_SECONDARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
CarbonMonoxideConcentrationConverter,
NitrogenDioxideConcentrationConverter,
OzoneConcentrationConverter,
SulphurDioxideConcentrationConverter,
TemperatureDeltaConverter,
SulphurDioxideConcentrationConverter,
]
STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {

View File

@@ -33,8 +33,6 @@ from homeassistant.util.unit_conversion import (
InformationConverter,
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
ReactiveEnergyConverter,
@@ -87,14 +85,11 @@ UNIT_SCHEMA = vol.Schema(
vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS),
vol.Optional("duration"): vol.In(DurationConverter.VALID_UNITS),
vol.Optional("electric_current"): vol.In(ElectricCurrentConverter.VALID_UNITS),
vol.Optional("voltage"): vol.In(ElectricPotentialConverter.VALID_UNITS),
vol.Optional("energy"): vol.In(EnergyConverter.VALID_UNITS),
vol.Optional("energy_distance"): vol.In(EnergyDistanceConverter.VALID_UNITS),
vol.Optional("information"): vol.In(InformationConverter.VALID_UNITS),
vol.Optional("mass"): vol.In(MassConverter.VALID_UNITS),
vol.Optional("nitrogen_dioxide"): vol.In(
NitrogenDioxideConcentrationConverter.VALID_UNITS
),
vol.Optional("ozone"): vol.In(OzoneConcentrationConverter.VALID_UNITS),
vol.Optional("power"): vol.In(PowerConverter.VALID_UNITS),
vol.Optional("pressure"): vol.In(PressureConverter.VALID_UNITS),
vol.Optional("reactive_energy"): vol.In(ReactiveEnergyConverter.VALID_UNITS),
@@ -108,7 +103,6 @@ UNIT_SCHEMA = vol.Schema(
TemperatureDeltaConverter.VALID_UNITS
),
vol.Optional("unitless"): vol.In(UnitlessRatioConverter.VALID_UNITS),
vol.Optional("voltage"): vol.In(ElectricPotentialConverter.VALID_UNITS),
vol.Optional("volume"): vol.In(VolumeConverter.VALID_UNITS),
vol.Optional("volume_flow_rate"): vol.In(VolumeFlowRateConverter.VALID_UNITS),
}

View File

@@ -12,9 +12,6 @@
"invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"initiate_flow": {
"user": "[%key:common::config_flow::initiate_flow::account%]"
},
"step": {
"kamereon": {
"data": {

View File

@@ -12,7 +12,6 @@
"homekit": {
"models": ["Sensibo"]
},
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["pysensibo"],
"quality_scale": "platinum",

View File

@@ -63,8 +63,6 @@ from homeassistant.util.unit_conversion import (
InformationConverter,
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
ReactiveEnergyConverter,
@@ -285,7 +283,7 @@ class SensorDeviceClass(StrEnum):
NITROGEN_DIOXIDE = "nitrogen_dioxide"
"""Amount of NO2.
Unit of measurement: `ppb` (parts per billion), `μg/m³`
Unit of measurement: `μg/m³`
"""
NITROGEN_MONOXIDE = "nitrogen_monoxide"
@@ -303,7 +301,7 @@ class SensorDeviceClass(StrEnum):
OZONE = "ozone"
"""Amount of O3.
Unit of measurement: `ppb` (parts per billion),`μg/m³`
Unit of measurement: `μg/m³`
"""
PH = "ph"
@@ -565,8 +563,6 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] =
SensorDeviceClass.ENERGY_DISTANCE: EnergyDistanceConverter,
SensorDeviceClass.ENERGY_STORAGE: EnergyConverter,
SensorDeviceClass.GAS: VolumeConverter,
SensorDeviceClass.NITROGEN_DIOXIDE: NitrogenDioxideConcentrationConverter,
SensorDeviceClass.OZONE: OzoneConcentrationConverter,
SensorDeviceClass.POWER: PowerConverter,
SensorDeviceClass.POWER_FACTOR: UnitlessRatioConverter,
SensorDeviceClass.PRECIPITATION: DistanceConverter,
@@ -635,16 +631,10 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
SensorDeviceClass.ILLUMINANCE: {LIGHT_LUX},
SensorDeviceClass.IRRADIANCE: set(UnitOfIrradiance),
SensorDeviceClass.MOISTURE: {PERCENTAGE},
SensorDeviceClass.NITROGEN_DIOXIDE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.NITROGEN_DIOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.NITROGEN_MONOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.OZONE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.OZONE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.PH: {None},
SensorDeviceClass.PM1: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},

View File

@@ -8,6 +8,7 @@ import logging
import aiohttp
from aiohttp.client_exceptions import ClientError, ClientResponseError
import tibber
from tibber import data_api as tibber_data_api
from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import Event, HomeAssistant
@@ -22,7 +23,13 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util, ssl as ssl_util
from .const import AUTH_IMPLEMENTATION, DATA_HASS_CONFIG, DOMAIN, TibberConfigEntry
from .const import (
AUTH_IMPLEMENTATION,
CONF_LEGACY_ACCESS_TOKEN,
DATA_HASS_CONFIG,
DOMAIN,
TibberConfigEntry,
)
from .coordinator import TibberDataAPICoordinator
from .services import async_setup_services
@@ -37,23 +44,24 @@ _LOGGER = logging.getLogger(__name__)
class TibberRuntimeData:
"""Runtime data for Tibber API entries."""
tibber_connection: tibber.Tibber
session: OAuth2Session
data_api_coordinator: TibberDataAPICoordinator | None = field(default=None)
_client: tibber.Tibber | None = None
_client: tibber_data_api.TibberDataAPI | None = None
async def async_get_client(self, hass: HomeAssistant) -> tibber.Tibber:
"""Return an authenticated Tibber client."""
async def async_get_client(
self, hass: HomeAssistant
) -> tibber_data_api.TibberDataAPI:
"""Return an authenticated Tibber Data API client."""
await self.session.async_ensure_token_valid()
token = self.session.token
access_token = token.get(CONF_ACCESS_TOKEN)
if not access_token:
raise ConfigEntryAuthFailed("Access token missing from OAuth session")
if self._client is None:
self._client = tibber.Tibber(
access_token=access_token,
self._client = tibber_data_api.TibberDataAPI(
access_token,
websession=async_get_clientsession(hass),
time_zone=dt_util.get_default_time_zone(),
ssl=ssl_util.get_default_context(),
)
self._client.set_access_token(access_token)
return self._client
@@ -80,6 +88,32 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bo
translation_key="data_api_reauth_required",
)
tibber_connection = tibber.Tibber(
access_token=entry.data[CONF_LEGACY_ACCESS_TOKEN],
websession=async_get_clientsession(hass),
time_zone=dt_util.get_default_time_zone(),
ssl=ssl_util.get_default_context(),
)
async def _close(event: Event) -> None:
await tibber_connection.rt_disconnect()
entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))
try:
await tibber_connection.update_info()
except (
TimeoutError,
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
) as err:
raise ConfigEntryNotReady("Unable to connect") from err
except tibber.InvalidLoginError as exp:
_LOGGER.error("Failed to login. %s", exp)
return False
except tibber.FatalHttpExceptionError:
return False
try:
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
@@ -101,29 +135,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bo
raise ConfigEntryNotReady from err
entry.runtime_data = TibberRuntimeData(
tibber_connection=tibber_connection,
session=session,
)
tibber_connection = await entry.runtime_data.async_get_client(hass)
async def _close(event: Event) -> None:
await tibber_connection.rt_disconnect()
entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))
try:
await tibber_connection.update_info()
except (
TimeoutError,
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
) as err:
raise ConfigEntryNotReady("Unable to connect") from err
except tibber.InvalidLoginError as err:
raise ConfigEntryAuthFailed("Invalid login credentials") from err
except tibber.FatalHttpExceptionError as err:
raise ConfigEntryNotReady("Fatal HTTP error from Tibber API") from err
coordinator = TibberDataAPICoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data.data_api_coordinator = coordinator
@@ -139,6 +154,5 @@ async def async_unload_entry(
if unload_ok := await hass.config_entries.async_unload_platforms(
config_entry, PLATFORMS
):
tibber_connection = await config_entry.runtime_data.async_get_client(hass)
await tibber_connection.rt_disconnect()
await config_entry.runtime_data.tibber_connection.rt_disconnect()
return unload_ok

View File

@@ -8,16 +8,21 @@ from typing import Any
import aiohttp
import tibber
from tibber import data_api as tibber_data_api
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
from .const import DATA_API_DEFAULT_SCOPES, DOMAIN
from .const import CONF_LEGACY_ACCESS_TOKEN, DATA_API_DEFAULT_SCOPES, DOMAIN
DATA_SCHEMA = vol.Schema({vol.Required(CONF_LEGACY_ACCESS_TOKEN): str})
ERR_TIMEOUT = "timeout"
ERR_CLIENT = "cannot_connect"
ERR_TOKEN = "invalid_access_token"
TOKEN_URL = "https://developer.tibber.com/settings/access-token"
_LOGGER = logging.getLogger(__name__)
@@ -31,7 +36,8 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self._oauth_data: dict[str, Any] | None = None
self._access_token: str | None = None
self._title = ""
@property
def logger(self) -> logging.Logger:
@@ -46,70 +52,114 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
"scope": " ".join(DATA_API_DEFAULT_SCOPES),
}
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle a reauth flow."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication by reusing the user step."""
"""Handle the initial step."""
if user_input is None:
return self.async_show_form(step_id="reauth_confirm")
return await self.async_step_user()
data_schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
)
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Finalize the OAuth flow and create the config entry."""
self._oauth_data = data
return await self._async_validate_and_create()
return self.async_show_form(
step_id=SOURCE_USER,
data_schema=data_schema,
description_placeholders={"url": TOKEN_URL},
errors={},
)
async def async_step_connection_error(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle connection error retry."""
if user_input is not None:
return await self._async_validate_and_create()
return self.async_show_form(step_id="connection_error")
async def _async_validate_and_create(self) -> ConfigFlowResult:
"""Validate the OAuth token and create the config entry."""
assert self._oauth_data is not None
access_token = self._oauth_data[CONF_TOKEN][CONF_ACCESS_TOKEN]
self._access_token = user_input[CONF_LEGACY_ACCESS_TOKEN].replace(" ", "")
tibber_connection = tibber.Tibber(
access_token=access_token,
access_token=self._access_token,
websession=async_get_clientsession(self.hass),
)
self._title = tibber_connection.name or "Tibber"
errors: dict[str, str] = {}
try:
await tibber_connection.update_info()
except TimeoutError:
return await self.async_step_connection_error()
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TIMEOUT
except tibber.InvalidLoginError:
return self.async_abort(reason=ERR_TOKEN)
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TOKEN
except (
aiohttp.ClientError,
tibber.RetryableHttpExceptionError,
tibber.FatalHttpExceptionError,
):
return await self.async_step_connection_error()
except tibber.FatalHttpExceptionError:
return self.async_abort(reason=ERR_CLIENT)
errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_CLIENT
if errors:
data_schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
)
return self.async_show_form(
step_id=SOURCE_USER,
data_schema=data_schema,
description_placeholders={"url": TOKEN_URL},
errors=errors,
)
await self.async_set_unique_id(tibber_connection.user_id)
title = tibber_connection.name or "Tibber"
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
self._abort_if_unique_id_mismatch(
reason="wrong_account",
description_placeholders={"title": reauth_entry.title},
)
else:
self._abort_if_unique_id_configured()
return await self.async_step_pick_implementation()
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle a reauth flow."""
reauth_entry = self._get_reauth_entry()
self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
self._title = reauth_entry.title
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication by reusing the user step."""
reauth_entry = self._get_reauth_entry()
self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
self._title = reauth_entry.title
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
)
return await self.async_step_user()
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Finalize the OAuth flow and create the config entry."""
if self._access_token is None:
return self.async_abort(reason="missing_configuration")
data[CONF_LEGACY_ACCESS_TOKEN] = self._access_token
access_token = data[CONF_TOKEN][CONF_ACCESS_TOKEN]
data_api_client = tibber_data_api.TibberDataAPI(
access_token,
websession=async_get_clientsession(self.hass),
)
try:
await data_api_client.get_userinfo()
except (aiohttp.ClientError, TimeoutError):
return self.async_abort(reason="cannot_connect")
if self.source == SOURCE_REAUTH:
reauth_entry = self._get_reauth_entry()
return self.async_update_reload_and_abort(
reauth_entry,
data=self._oauth_data,
title=title,
data=data,
title=self._title,
)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=title, data=self._oauth_data)
return self.async_create_entry(title=self._title, data=data)

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from typing import TYPE_CHECKING
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
if TYPE_CHECKING:
from . import TibberRuntimeData
@@ -12,6 +13,8 @@ if TYPE_CHECKING:
type TibberConfigEntry = ConfigEntry[TibberRuntimeData]
CONF_LEGACY_ACCESS_TOKEN = CONF_ACCESS_TOKEN
AUTH_IMPLEMENTATION = "auth_implementation"
DATA_HASS_CONFIG = "tibber_hass_config"
DOMAIN = "tibber"

View File

@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, cast
from aiohttp.client_exceptions import ClientError
import tibber
from tibber.data_api import TibberDevice
from tibber.data_api import TibberDataAPI, TibberDevice
from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
@@ -230,26 +230,28 @@ class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
return device_sensors.get(sensor_id)
return None
async def _async_get_client(self) -> tibber.Tibber:
"""Get the Tibber client with error handling."""
async def _async_get_client(self) -> TibberDataAPI:
"""Get the Tibber Data API client with error handling."""
try:
return await self._runtime_data.async_get_client(self.hass)
except ConfigEntryAuthFailed:
raise
except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
raise UpdateFailed(f"Unable to create Tibber client: {err}") from err
raise UpdateFailed(
f"Unable to create Tibber Data API client: {err}"
) from err
async def _async_setup(self) -> None:
"""Initial load of Tibber Data API devices."""
client = await self._async_get_client()
devices = await client.data_api.get_all_devices()
devices = await client.get_all_devices()
self._build_sensor_lookup(devices)
async def _async_update_data(self) -> dict[str, TibberDevice]:
"""Fetch the latest device capabilities from the Tibber Data API."""
client = await self._async_get_client()
try:
devices: dict[str, TibberDevice] = await client.data_api.update_devices()
devices: dict[str, TibberDevice] = await client.update_devices()
except tibber.exceptions.RateLimitExceededError as err:
raise UpdateFailed(
f"Rate limit exceeded, retry after {err.retry_after} seconds",

View File

@@ -15,7 +15,6 @@ async def async_get_config_entry_diagnostics(
"""Return diagnostics for a config entry."""
runtime = config_entry.runtime_data
tibber_connection = await runtime.async_get_client(hass)
result: dict[str, Any] = {
"homes": [
{
@@ -25,7 +24,7 @@ async def async_get_config_entry_diagnostics(
"last_cons_data_timestamp": home.last_cons_data_timestamp,
"country": home.country,
}
for home in tibber_connection.get_homes(only_active=False)
for home in runtime.tibber_connection.get_homes(only_active=False)
]
}

View File

@@ -2,8 +2,6 @@
from __future__ import annotations
import tibber
from homeassistant.components.notify import (
ATTR_TITLE_DEFAULT,
NotifyEntity,
@@ -39,9 +37,7 @@ class TibberNotificationEntity(NotifyEntity):
async def async_send_message(self, message: str, title: str | None = None) -> None:
"""Send a message to Tibber devices."""
tibber_connection: tibber.Tibber = (
await self._entry.runtime_data.async_get_client(self.hass)
)
tibber_connection = self._entry.runtime_data.tibber_connection
try:
await tibber_connection.send_notification(
title or ATTR_TITLE_DEFAULT, message

View File

@@ -605,7 +605,7 @@ async def _async_setup_graphql_sensors(
) -> None:
"""Set up the Tibber sensor."""
tibber_connection = await entry.runtime_data.async_get_client(hass)
tibber_connection = entry.runtime_data.tibber_connection
entity_registry = er.async_get(hass)

View File

@@ -42,7 +42,7 @@ async def __get_prices(call: ServiceCall) -> ServiceResponse:
translation_domain=DOMAIN,
translation_key="no_config_entry",
)
tibber_connection = await entries[0].runtime_data.async_get_client(call.hass)
tibber_connection = entries[0].runtime_data.tibber_connection
start = __get_date(call.data.get(ATTR_START), "start")
end = __get_date(call.data.get(ATTR_END), "end")

View File

@@ -2,21 +2,26 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]",
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"wrong_account": "The connected account does not match {title}. Sign in with the same Tibber account and try again."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]",
"timeout": "[%key:common::config_flow::error::timeout_connect%]"
},
"step": {
"connection_error": {
"description": "Could not connect to Tibber. Check your internet connection and try again.",
"title": "Connection failed"
},
"reauth_confirm": {
"description": "Reconnect your Tibber account to refresh access.",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]"
},
"description": "Enter your access token from {url}"
}
}
},

View File

@@ -41,7 +41,7 @@
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"quality_scale": "platinum",
"requirements": ["uiprotect==10.0.0", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==8.1.1", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

View File

@@ -370,9 +370,13 @@ def _async_get_connector(
return connectors[connector_key]
if verify_ssl:
ssl_context: SSLContext = ssl_util.client_context(ssl_cipher)
ssl_context: SSLContext = ssl_util.client_context(
ssl_cipher, ssl_util.SSL_ALPN_HTTP11
)
else:
ssl_context = ssl_util.client_context_no_verify(ssl_cipher)
ssl_context = ssl_util.client_context_no_verify(
ssl_cipher, ssl_util.SSL_ALPN_HTTP11
)
connector = HomeAssistantTCPConnector(
family=family,

View File

@@ -29,7 +29,10 @@ from typing import (
import voluptuous as vol
from homeassistant.const import (
ATTR_AREA_ID,
ATTR_DEVICE_CLASS,
ATTR_FLOOR_ID,
ATTR_LABEL_ID,
CONF_ABOVE,
CONF_AFTER,
CONF_ATTRIBUTE,
@@ -1384,9 +1387,27 @@ def async_extract_devices(config: ConfigType | Template) -> set[str]:
@callback
def async_extract_targets(
def async_extract_areas(config: ConfigType | Template) -> set[str]:
"""Extract areas from a condition."""
return _async_extract_targets(config, ATTR_AREA_ID)
@callback
def async_extract_floors(config: ConfigType | Template) -> set[str]:
"""Extract floors from a condition."""
return _async_extract_targets(config, ATTR_FLOOR_ID)
@callback
def async_extract_labels(config: ConfigType | Template) -> set[str]:
"""Extract labels from a condition."""
return _async_extract_targets(config, ATTR_LABEL_ID)
@callback
def _async_extract_targets(
config: ConfigType | Template,
target_type: Literal["area_id", "floor_id", "label_id"],
target_type: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
) -> set[str]:
"""Extract targets from a condition."""
referenced: set[str] = set()

View File

@@ -17,6 +17,9 @@ from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.loader import bind_hass
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.ssl import (
SSL_ALPN_HTTP11,
SSL_ALPN_HTTP11_HTTP2,
SSLALPNProtocols,
SSLCipherList,
client_context,
create_no_verify_ssl_context,
@@ -28,9 +31,9 @@ from .frame import warn_use
# and we want to keep the connection open for a while so we
# don't have to reconnect every time so we use 15s to match aiohttp.
KEEP_ALIVE_TIMEOUT = 15
DATA_ASYNC_CLIENT: HassKey[httpx.AsyncClient] = HassKey("httpx_async_client")
DATA_ASYNC_CLIENT_NOVERIFY: HassKey[httpx.AsyncClient] = HassKey(
"httpx_async_client_noverify"
# Shared httpx clients keyed by (verify_ssl, alpn_protocols)
DATA_ASYNC_CLIENT: HassKey[dict[tuple[bool, SSLALPNProtocols], httpx.AsyncClient]] = (
HassKey("httpx_async_client")
)
DEFAULT_LIMITS = limits = httpx.Limits(keepalive_expiry=KEEP_ALIVE_TIMEOUT)
SERVER_SOFTWARE = (
@@ -42,15 +45,26 @@ USER_AGENT = "User-Agent"
@callback
@bind_hass
def get_async_client(hass: HomeAssistant, verify_ssl: bool = True) -> httpx.AsyncClient:
def get_async_client(
hass: HomeAssistant,
verify_ssl: bool = True,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
) -> httpx.AsyncClient:
"""Return default httpx AsyncClient.
This method must be run in the event loop.
"""
key = DATA_ASYNC_CLIENT if verify_ssl else DATA_ASYNC_CLIENT_NOVERIFY
if (client := hass.data.get(key)) is None:
client = hass.data[key] = create_async_httpx_client(hass, verify_ssl)
Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 to get a client configured for HTTP/2.
Clients are cached separately by ALPN protocol to ensure proper SSL context
configuration (ALPN protocols differ between HTTP versions).
"""
client_key = (verify_ssl, alpn_protocols)
clients = hass.data.setdefault(DATA_ASYNC_CLIENT, {})
if (client := clients.get(client_key)) is None:
client = clients[client_key] = create_async_httpx_client(
hass, verify_ssl, alpn_protocols=alpn_protocols
)
return client
@@ -77,6 +91,7 @@ def create_async_httpx_client(
verify_ssl: bool = True,
auto_cleanup: bool = True,
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
**kwargs: Any,
) -> httpx.AsyncClient:
"""Create a new httpx.AsyncClient with kwargs, i.e. for cookies.
@@ -84,13 +99,22 @@ def create_async_httpx_client(
If auto_cleanup is False, the client will be
automatically closed on homeassistant_stop.
Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 for HTTP/2 support (automatically
enables httpx http2 mode).
This method must be run in the event loop.
"""
# Use the requested ALPN protocols directly to ensure proper SSL context
# bucketing. httpx/httpcore mutates SSL contexts by calling set_alpn_protocols(),
# so we pre-set the correct protocols to prevent shared context corruption.
ssl_context = (
client_context(ssl_cipher_list)
client_context(ssl_cipher_list, alpn_protocols)
if verify_ssl
else create_no_verify_ssl_context(ssl_cipher_list)
else create_no_verify_ssl_context(ssl_cipher_list, alpn_protocols)
)
# Enable httpx HTTP/2 mode when HTTP/2 protocol is requested
if alpn_protocols == SSL_ALPN_HTTP11_HTTP2:
kwargs.setdefault("http2", True)
client = HassHttpXAsyncClient(
verify=ssl_context,
headers={USER_AGENT: SERVER_SOFTWARE},

View File

@@ -1601,13 +1601,8 @@ class Script:
):
_referenced_extract_ids(data, target, referenced)
elif action == cv.SCRIPT_ACTION_CHECK_CONDITION:
referenced |= condition.async_extract_targets(step, target)
elif action == cv.SCRIPT_ACTION_CHOOSE:
for choice in step[CONF_CHOOSE]:
for cond in choice[CONF_CONDITIONS]:
referenced |= condition.async_extract_targets(cond, target)
Script._find_referenced_target(
target, referenced, choice[CONF_SEQUENCE]
)
@@ -1617,8 +1612,6 @@ class Script:
)
elif action == cv.SCRIPT_ACTION_IF:
for cond in step[CONF_IF]:
referenced |= condition.async_extract_targets(cond, target)
Script._find_referenced_target(target, referenced, step[CONF_THEN])
if CONF_ELSE in step:
Script._find_referenced_target(target, referenced, step[CONF_ELSE])

View File

@@ -8,6 +8,17 @@ import ssl
import certifi
# Type alias for ALPN protocols tuple (None means no ALPN protocols set)
type SSLALPNProtocols = tuple[str, ...] | None
# ALPN protocol configurations
# No ALPN protocols - used for libraries that don't support/need ALPN (e.g., aioimap)
SSL_ALPN_NONE: SSLALPNProtocols = None
# HTTP/1.1 only - used by default and for aiohttp (which doesn't support HTTP/2)
SSL_ALPN_HTTP11: SSLALPNProtocols = ("http/1.1",)
# HTTP/1.1 with HTTP/2 support - used when httpx http2=True
SSL_ALPN_HTTP11_HTTP2: SSLALPNProtocols = ("http/1.1", "h2")
class SSLCipherList(StrEnum):
"""SSL cipher lists."""
@@ -64,7 +75,10 @@ SSL_CIPHER_LISTS = {
@cache
def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
def _client_context_no_verify(
ssl_cipher_list: SSLCipherList,
alpn_protocols: SSLALPNProtocols,
) -> ssl.SSLContext:
# This is a copy of aiohttp's create_default_context() function, with the
# ssl verify turned off.
# https://github.com/aio-libs/aiohttp/blob/33953f110e97eecc707e1402daa8d543f38a189b/aiohttp/connector.py#L911
@@ -78,12 +92,18 @@ def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
sslcontext.set_default_verify_paths()
if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
# Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
# from mutating the shared SSL context with different protocol settings.
# If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
if alpn_protocols is not None:
sslcontext.set_alpn_protocols(list(alpn_protocols))
return sslcontext
def _create_client_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return an independent SSL context for making requests."""
# Reuse environment variable definition from requests, since it's already a
@@ -96,6 +116,11 @@ def _create_client_context(
)
if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
# Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
# from mutating the shared SSL context with different protocol settings.
# If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
if alpn_protocols is not None:
sslcontext.set_alpn_protocols(list(alpn_protocols))
return sslcontext
@@ -103,63 +128,63 @@ def _create_client_context(
@cache
def _client_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
# Cached version of _create_client_context
return _create_client_context(ssl_cipher_list)
return _create_client_context(ssl_cipher_list, alpn_protocols)
# Create this only once and reuse it
_DEFAULT_SSL_CONTEXT = _client_context(SSLCipherList.PYTHON_DEFAULT)
_DEFAULT_NO_VERIFY_SSL_CONTEXT = _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT)
_NO_VERIFY_SSL_CONTEXTS = {
SSLCipherList.INTERMEDIATE: _client_context_no_verify(SSLCipherList.INTERMEDIATE),
SSLCipherList.MODERN: _client_context_no_verify(SSLCipherList.MODERN),
SSLCipherList.INSECURE: _client_context_no_verify(SSLCipherList.INSECURE),
}
_SSL_CONTEXTS = {
SSLCipherList.INTERMEDIATE: _client_context(SSLCipherList.INTERMEDIATE),
SSLCipherList.MODERN: _client_context(SSLCipherList.MODERN),
SSLCipherList.INSECURE: _client_context(SSLCipherList.INSECURE),
}
# Pre-warm the cache for ALL SSL context configurations at module load time.
# This is critical because creating SSL contexts loads certificates from disk,
# which is blocking I/O that must not happen in the event loop.
_SSL_ALPN_PROTOCOLS = (SSL_ALPN_NONE, SSL_ALPN_HTTP11, SSL_ALPN_HTTP11_HTTP2)
for _cipher in SSLCipherList:
for _alpn in _SSL_ALPN_PROTOCOLS:
_client_context(_cipher, _alpn)
_client_context_no_verify(_cipher, _alpn)
def get_default_context() -> ssl.SSLContext:
"""Return the default SSL context."""
return _DEFAULT_SSL_CONTEXT
return _client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)
def get_default_no_verify_context() -> ssl.SSLContext:
"""Return the default SSL context that does not verify the server certificate."""
return _DEFAULT_NO_VERIFY_SSL_CONTEXT
return _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)
def client_context_no_verify(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return a SSL context with no verification with a specific ssl cipher."""
return _NO_VERIFY_SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_NO_VERIFY_SSL_CONTEXT)
return _client_context_no_verify(ssl_cipher_list, alpn_protocols)
def client_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return an SSL context for making requests."""
return _SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_SSL_CONTEXT)
return _client_context(ssl_cipher_list, alpn_protocols)
def create_client_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return an independent SSL context for making requests."""
# This explicitly uses the non-cached version to create a client context
return _create_client_context(ssl_cipher_list)
return _create_client_context(ssl_cipher_list, alpn_protocols)
def create_no_verify_ssl_context(
ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
"""Return an SSL context that does not verify the server certificate."""
return _client_context_no_verify(ssl_cipher_list)
return _client_context_no_verify(ssl_cipher_list, alpn_protocols)
def server_context_modern() -> ssl.SSLContext:

View File

@@ -103,8 +103,6 @@ _AMBIENT_IDEAL_GAS_MOLAR_VOLUME = ( # m3⋅mol⁻¹
)
# Molar masses in g⋅mol⁻¹
_CARBON_MONOXIDE_MOLAR_MASS = 28.01
_NITROGEN_DIOXIDE_MOLAR_MASS = 46.0055
_OZONE_MOLAR_MASS = 48.00
_SULPHUR_DIOXIDE_MOLAR_MASS = 64.066
@@ -213,22 +211,6 @@ class CarbonMonoxideConcentrationConverter(BaseUnitConverter):
}
class NitrogenDioxideConcentrationConverter(BaseUnitConverter):
"""Convert nitrogen dioxide ratio to mass per volume."""
UNIT_CLASS = "nitrogen_dioxide"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_NITROGEN_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
class SulphurDioxideConcentrationConverter(BaseUnitConverter):
"""Convert sulphur dioxide ratio to mass per volume."""
@@ -563,22 +545,6 @@ class ReactivePowerConverter(BaseUnitConverter):
}
class OzoneConcentrationConverter(BaseUnitConverter):
"""Convert ozone ratio to mass per volume."""
UNIT_CLASS = "ozone"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_OZONE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}
class SpeedConverter(BaseUnitConverter):
"""Utility to convert speed values."""

8
requirements_all.txt generated
View File

@@ -334,7 +334,7 @@ aionanoleaf==0.2.1
aionotion==2024.03.0
# homeassistant.components.ntfy
aiontfy==0.7.0
aiontfy==0.6.1
# homeassistant.components.nut
aionut==4.3.4
@@ -1104,7 +1104,7 @@ google-nest-sdm==9.1.2
google-photos-library-api==0.12.1
# homeassistant.components.google_air_quality
google_air_quality_api==3.0.0
google_air_quality_api==2.1.2
# homeassistant.components.slide
# homeassistant.components.slide_local
@@ -1281,7 +1281,7 @@ ihcsdk==2.8.5
imeon_inverter_api==0.4.0
# homeassistant.components.imgw_pib
imgw_pib==2.0.1
imgw_pib==1.6.0
# homeassistant.components.incomfort
incomfort-client==0.6.11
@@ -3080,7 +3080,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
uiprotect==10.0.0
uiprotect==8.1.1
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7

View File

@@ -319,7 +319,7 @@ aionanoleaf==0.2.1
aionotion==2024.03.0
# homeassistant.components.ntfy
aiontfy==0.7.0
aiontfy==0.6.1
# homeassistant.components.nut
aionut==4.3.4
@@ -980,7 +980,7 @@ google-nest-sdm==9.1.2
google-photos-library-api==0.12.1
# homeassistant.components.google_air_quality
google_air_quality_api==3.0.0
google_air_quality_api==2.1.2
# homeassistant.components.slide
# homeassistant.components.slide_local
@@ -1127,7 +1127,7 @@ igloohome-api==0.1.1
imeon_inverter_api==0.4.0
# homeassistant.components.imgw_pib
imgw_pib==2.0.1
imgw_pib==1.6.0
# homeassistant.components.incomfort
incomfort-client==0.6.11
@@ -2577,7 +2577,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1
# homeassistant.components.unifiprotect
uiprotect==10.0.0
uiprotect==8.1.1
# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7

View File

@@ -1,156 +0,0 @@
"""Test device tracker conditions."""
from typing import Any
import pytest
from homeassistant.const import STATE_HOME, STATE_NOT_HOME
from homeassistant.core import HomeAssistant
from tests.components import (
ConditionStateDescription,
assert_condition_gated_by_labs_flag,
create_target_condition,
parametrize_condition_states_all,
parametrize_condition_states_any,
parametrize_target_entities,
set_or_remove_state,
target_entities,
)
@pytest.fixture
async def target_device_trackers(hass: HomeAssistant) -> list[str]:
"""Create multiple device tracker entities associated with different targets."""
return (await target_entities(hass, "device_tracker"))["included"]
@pytest.mark.parametrize(
"condition",
[
"device_tracker.is_home",
"device_tracker.is_not_home",
],
)
async def test_device_tracker_conditions_gated_by_labs_flag(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, condition: str
) -> None:
"""Test the device tracker conditions are gated by the labs flag."""
await assert_condition_gated_by_labs_flag(hass, caplog, condition)
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
("condition_target_config", "entity_id", "entities_in_target"),
parametrize_target_entities("device_tracker"),
)
@pytest.mark.parametrize(
("condition", "condition_options", "states"),
[
*parametrize_condition_states_any(
condition="device_tracker.is_home",
target_states=[STATE_HOME],
other_states=[STATE_NOT_HOME],
),
*parametrize_condition_states_any(
condition="device_tracker.is_not_home",
target_states=[STATE_NOT_HOME],
other_states=[STATE_HOME],
),
],
)
async def test_device_tracker_state_condition_behavior_any(
hass: HomeAssistant,
target_device_trackers: list[str],
condition_target_config: dict,
entity_id: str,
entities_in_target: int,
condition: str,
condition_options: dict[str, Any],
states: list[ConditionStateDescription],
) -> None:
"""Test the device tracker state condition with the 'any' behavior."""
other_entity_ids = set(target_device_trackers) - {entity_id}
# Set all device trackers, including the tested one, to the initial state
for eid in target_device_trackers:
set_or_remove_state(hass, eid, states[0]["included"])
await hass.async_block_till_done()
condition = await create_target_condition(
hass,
condition=condition,
target=condition_target_config,
behavior="any",
)
for state in states:
included_state = state["included"]
set_or_remove_state(hass, entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]
# Check if changing other device trackers also passes the condition
for other_entity_id in other_entity_ids:
set_or_remove_state(hass, other_entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
("condition_target_config", "entity_id", "entities_in_target"),
parametrize_target_entities("device_tracker"),
)
@pytest.mark.parametrize(
("condition", "condition_options", "states"),
[
*parametrize_condition_states_all(
condition="device_tracker.is_home",
target_states=[STATE_HOME],
other_states=[STATE_NOT_HOME],
),
*parametrize_condition_states_all(
condition="device_tracker.is_not_home",
target_states=[STATE_NOT_HOME],
other_states=[STATE_HOME],
),
],
)
async def test_device_tracker_state_condition_behavior_all(
hass: HomeAssistant,
target_device_trackers: list[str],
condition_target_config: dict,
entity_id: str,
entities_in_target: int,
condition: str,
condition_options: dict[str, Any],
states: list[ConditionStateDescription],
) -> None:
"""Test the device tracker state condition with the 'all' behavior."""
other_entity_ids = set(target_device_trackers) - {entity_id}
# Set all device trackers, including the tested one, to the initial state
for eid in target_device_trackers:
set_or_remove_state(hass, eid, states[0]["included"])
await hass.async_block_till_done()
condition = await create_target_condition(
hass,
condition=condition,
target=condition_target_config,
behavior="all",
)
for state in states:
included_state = state["included"]
set_or_remove_state(hass, entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true_first_entity"]
for other_entity_id in other_entity_ids:
set_or_remove_state(hass, other_entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]

View File

@@ -132,9 +132,6 @@
'name': None,
'object_id_base': 'Carbon monoxide',
'options': dict({
'sensor.private': dict({
'suggested_unit_of_measurement': 'ppm',
}),
}),
'original_device_class': <SensorDeviceClass.CO: 'carbon_monoxide'>,
'original_icon': None,
@@ -321,14 +318,14 @@
'object_id_base': 'Nitrogen dioxide',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.NITROGEN_DIOXIDE: 'nitrogen_dioxide'>,
'original_device_class': None,
'original_icon': None,
'original_name': 'Nitrogen dioxide',
'platform': 'google_air_quality',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'nitrogen_dioxide',
'unique_id': 'no2_10.1_20.1',
'unit_of_measurement': 'ppb',
})
@@ -337,7 +334,6 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'attribution': 'Data provided by Google Air Quality',
'device_class': 'nitrogen_dioxide',
'friendly_name': 'Home Nitrogen dioxide',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'ppb',
@@ -648,14 +644,14 @@
'object_id_base': 'Sulphur dioxide',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.SULPHUR_DIOXIDE: 'sulphur_dioxide'>,
'original_device_class': None,
'original_icon': None,
'original_name': 'Sulphur dioxide',
'platform': 'google_air_quality',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'sulphur_dioxide',
'unique_id': 'so2_10.1_20.1',
'unit_of_measurement': 'ppb',
})
@@ -664,7 +660,6 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'attribution': 'Data provided by Google Air Quality',
'device_class': 'sulphur_dioxide',
'friendly_name': 'Home Sulphur dioxide',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'ppb',

View File

@@ -130,44 +130,3 @@ async def test_hassio_addon_panel_api(
"test1",
{"enable": True, "title": "Test", "icon": "mdi:test", "admin": False},
)
@pytest.mark.usefixtures("hassio_env")
async def test_hassio_addon_panel_registration(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test panel registration calls frontend.async_register_built_in_panel."""
aioclient_mock.get(
"http://127.0.0.1/ingress/panels",
json={
"result": "ok",
"data": {
"panels": {
"test_addon": {
"enable": True,
"title": "Test Addon",
"icon": "mdi:test-tube",
"admin": True,
},
}
},
},
)
with patch(
"homeassistant.components.hassio.addon_panel.frontend.async_register_built_in_panel"
) as mock_register:
await async_setup_component(hass, "hassio", {})
await hass.async_block_till_done()
# Verify that async_register_built_in_panel was called with correct arguments
# for our test addon
mock_register.assert_any_call(
hass,
"app",
frontend_url_path="test_addon",
sidebar_title="Test Addon",
sidebar_icon="mdi:test-tube",
require_admin=True,
config={"addon": "test_addon"},
)

View File

@@ -3107,8 +3107,10 @@ def test_device_class_converters_are_complete() -> None:
SensorDeviceClass.IRRADIANCE,
SensorDeviceClass.MOISTURE,
SensorDeviceClass.MONETARY,
SensorDeviceClass.NITROGEN_DIOXIDE,
SensorDeviceClass.NITROGEN_MONOXIDE,
SensorDeviceClass.NITROUS_OXIDE,
SensorDeviceClass.OZONE,
SensorDeviceClass.PH,
SensorDeviceClass.PM1,
SensorDeviceClass.PM10,

View File

@@ -19,14 +19,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
from tests.typing import RecorderInstanceContextManager
@pytest.fixture
async def mock_recorder_before_hass(
async_test_recorder: RecorderInstanceContextManager,
) -> None:
"""Set up recorder before hass fixture runs."""
def create_tibber_device(
@@ -166,15 +158,21 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry:
@pytest.fixture
def tibber_mock() -> AsyncGenerator[MagicMock]:
def _tibber_patches() -> AsyncGenerator[tuple[MagicMock, MagicMock]]:
"""Patch the Tibber libraries used by the integration."""
unique_user_id = "unique_user_id"
title = "title"
with patch(
"tibber.Tibber",
autospec=True,
) as mock_tibber:
with (
patch(
"tibber.Tibber",
autospec=True,
) as mock_tibber,
patch(
"tibber.data_api.TibberDataAPI",
autospec=True,
) as mock_data_api_client,
):
tibber_mock = mock_tibber.return_value
tibber_mock.update_info = AsyncMock(return_value=True)
tibber_mock.user_id = unique_user_id
@@ -182,21 +180,24 @@ def tibber_mock() -> AsyncGenerator[MagicMock]:
tibber_mock.send_notification = AsyncMock()
tibber_mock.rt_disconnect = AsyncMock()
tibber_mock.get_homes = MagicMock(return_value=[])
tibber_mock.set_access_token = MagicMock()
data_api_mock = MagicMock()
data_api_mock.get_all_devices = AsyncMock(return_value={})
data_api_mock.update_devices = AsyncMock(return_value={})
data_api_mock.get_userinfo = AsyncMock()
tibber_mock.data_api = data_api_mock
data_api_client_mock = mock_data_api_client.return_value
data_api_client_mock.get_all_devices = AsyncMock(return_value={})
data_api_client_mock.update_devices = AsyncMock(return_value={})
yield tibber_mock
yield tibber_mock, data_api_client_mock
@pytest.fixture
def data_api_client_mock(tibber_mock: MagicMock) -> MagicMock:
def tibber_mock(_tibber_patches: tuple[MagicMock, MagicMock]) -> MagicMock:
"""Return the patched Tibber connection mock."""
return _tibber_patches[0]
@pytest.fixture
def data_api_client_mock(_tibber_patches: tuple[MagicMock, MagicMock]) -> MagicMock:
"""Return the patched Tibber Data API client mock."""
return tibber_mock.data_api
return _tibber_patches[1]
@pytest.fixture

View File

@@ -19,6 +19,7 @@ from homeassistant.components.tibber.application_credentials import TOKEN_URL
from homeassistant.components.tibber.config_flow import (
DATA_API_DEFAULT_SCOPES,
ERR_CLIENT,
ERR_TIMEOUT,
ERR_TOKEN,
)
from homeassistant.components.tibber.const import AUTH_IMPLEMENTATION, DOMAIN
@@ -54,164 +55,226 @@ def _mock_tibber(
return tibber_mock
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_show_config_form(recorder_mock: Recorder, hass: HomeAssistant) -> None:
"""Test show configuration form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "user"
@pytest.mark.parametrize(
("exception", "expected_error"),
[
(builtins.TimeoutError(), ERR_TIMEOUT),
(ClientError(), ERR_CLIENT),
(InvalidLoginError(401), ERR_TOKEN),
(RetryableHttpExceptionError(503), ERR_CLIENT),
(FatalHttpExceptionError(404), ERR_CLIENT),
],
)
async def test_oauth_create_entry_abort_exceptions(
async def test_graphql_step_exceptions(
recorder_mock: Recorder,
hass: HomeAssistant,
tibber_mock: MagicMock,
exception: Exception,
expected_error: str,
) -> None:
"""Validate fatal errors during OAuth finalization abort the flow."""
"""Validate GraphQL errors are surfaced."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
_mock_tibber(tibber_mock, update_side_effect=exception)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "invalid"}
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "user"
assert result["errors"][CONF_ACCESS_TOKEN] == expected_error
async def test_flow_entry_already_exists(
recorder_mock: Recorder,
hass: HomeAssistant,
config_entry,
tibber_mock: MagicMock,
) -> None:
"""Test user input for config_entry that already exists."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
_mock_tibber(tibber_mock, user_id="tibber")
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "valid"}
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"
async def test_reauth_flow_steps(
recorder_mock: Recorder,
hass: HomeAssistant,
config_entry: MockConfigEntry,
) -> None:
"""Test the reauth flow goes through reauth_confirm to user step."""
reauth_flow = await config_entry.start_reauth_flow(hass)
assert reauth_flow["type"] is FlowResultType.FORM
assert reauth_flow["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(reauth_flow["flow_id"])
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(
reauth_flow["flow_id"],
user_input={},
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "user"
async def test_oauth_create_entry_missing_configuration(
recorder_mock: Recorder,
hass: HomeAssistant,
) -> None:
"""Abort OAuth finalize if GraphQL step did not run."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
flow_result = await handler.async_oauth_create_entry(
{CONF_TOKEN: {CONF_ACCESS_TOKEN: "rest-token"}}
)
assert flow_result["type"] is FlowResultType.ABORT
assert flow_result["reason"] == expected_error
assert flow_result["reason"] == "missing_configuration"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
@pytest.mark.parametrize(
"exception",
[
builtins.TimeoutError(),
ClientError(),
RetryableHttpExceptionError(503),
],
)
async def test_oauth_create_entry_connection_error_retry(
async def test_oauth_create_entry_cannot_connect_userinfo(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
exception: Exception,
data_api_client_mock: MagicMock,
) -> None:
"""Validate transient connection errors show retry form."""
"""Abort OAuth finalize when Data API userinfo cannot be retrieved."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
DOMAIN,
context={"source": config_entries.SOURCE_USER},
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
handler._access_token = "graphql-token"
data_api_client_mock.get_userinfo = AsyncMock(side_effect=ClientError())
flow_result = await handler.async_oauth_create_entry(
{CONF_TOKEN: {CONF_ACCESS_TOKEN: "rest-token"}}
)
_mock_tibber(tibber_mock, update_side_effect=exception)
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "connection_error"
tibber_mock.update_info.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Mock Name"
assert flow_result["type"] is FlowResultType.ABORT
assert flow_result["reason"] == "cannot_connect"
async def test_data_api_requires_credentials(
recorder_mock: Recorder,
hass: HomeAssistant,
tibber_mock: MagicMock,
) -> None:
"""Abort when OAuth credentials are missing."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
_mock_tibber(tibber_mock)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "valid"}
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "missing_credentials"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_data_api_extra_authorize_scope(
recorder_mock: Recorder,
hass: HomeAssistant,
tibber_mock: MagicMock,
) -> None:
"""Ensure the OAuth implementation requests Tibber scopes."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("homeassistant.components.recorder.async_setup", return_value=True):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
assert handler.extra_authorize_data["scope"] == " ".join(DATA_API_DEFAULT_SCOPES)
_mock_tibber(tibber_mock)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "valid"}
)
handler = hass.config_entries.flow._progress[result["flow_id"]]
assert handler.extra_authorize_data["scope"] == " ".join(
DATA_API_DEFAULT_SCOPES
)
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_full_flow_success(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
data_api_client_mock: MagicMock,
) -> None:
"""Test configuring Tibber via OAuth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
"""Test configuring Tibber via GraphQL + OAuth."""
with patch("homeassistant.components.recorder.async_setup", return_value=True):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
_mock_tibber(tibber_mock)
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
_mock_tibber(tibber_mock)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "graphql-token"}
)
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
result = await hass.config_entries.flow.async_configure(result["flow_id"])
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
data = result["data"]
assert data[CONF_TOKEN]["access_token"] == "mock-access-token"
assert data[AUTH_IMPLEMENTATION] == DOMAIN
assert result["title"] == "Mock Name"
data_api_client_mock.get_userinfo = AsyncMock(
return_value={"name": "Mock Name"}
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.CREATE_ENTRY
data = result["data"]
assert data[CONF_TOKEN]["access_token"] == "mock-access-token"
assert data[CONF_ACCESS_TOKEN] == "graphql-token"
assert data[AUTH_IMPLEMENTATION] == DOMAIN
assert result["title"] == "Mock Name"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_data_api_abort_when_already_configured(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
) -> None:
"""Ensure only a single Data API entry can be configured."""
@@ -220,6 +283,7 @@ async def test_data_api_abort_when_already_configured(
data={
AUTH_IMPLEMENTATION: DOMAIN,
CONF_TOKEN: {"access_token": "existing"},
CONF_ACCESS_TOKEN: "stored-graphql",
},
unique_id="unique_user_id",
title="Existing",
@@ -231,133 +295,9 @@ async def test_data_api_abort_when_already_configured(
)
_mock_tibber(tibber_mock)
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "mock-access-token",
"refresh_token": "mock-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "new-token"}
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_reauth_flow_success(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
) -> None:
"""Test successful reauthentication flow."""
existing_entry = MockConfigEntry(
domain=DOMAIN,
data={
AUTH_IMPLEMENTATION: DOMAIN,
CONF_TOKEN: {"access_token": "old-token"},
},
unique_id="unique_user_id",
title="Existing",
)
existing_entry.add_to_hass(hass)
result = await existing_entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
_mock_tibber(tibber_mock)
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "new-access-token",
"refresh_token": "new-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert existing_entry.data[CONF_TOKEN]["access_token"] == "new-access-token"
@pytest.mark.usefixtures("setup_credentials", "current_request_with_host")
async def test_reauth_flow_wrong_account(
recorder_mock: Recorder,
hass: HomeAssistant,
hass_client_no_auth: ClientSessionGenerator,
aioclient_mock: AiohttpClientMocker,
tibber_mock: MagicMock,
) -> None:
"""Test reauthentication with wrong account aborts."""
existing_entry = MockConfigEntry(
domain=DOMAIN,
data={
AUTH_IMPLEMENTATION: DOMAIN,
CONF_TOKEN: {"access_token": "old-token"},
},
unique_id="original_user_id",
title="Existing",
)
existing_entry.add_to_hass(hass)
result = await existing_entry.start_reauth_flow(hass)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
# Mock a different user_id than the existing entry
_mock_tibber(tibber_mock, user_id="different_user_id")
assert result["type"] is FlowResultType.EXTERNAL_STEP
authorize_url = result["url"]
state = parse_qs(urlparse(authorize_url).query)["state"][0]
client = await hass_client_no_auth()
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == HTTPStatus.OK
aioclient_mock.post(
TOKEN_URL,
json={
"access_token": "new-access-token",
"refresh_token": "new-refresh-token",
"token_type": "bearer",
"expires_in": 3600,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "wrong_account"

View File

@@ -36,18 +36,19 @@ async def test_data_api_runtime_creates_client(hass: HomeAssistant) -> None:
runtime = TibberRuntimeData(
session=session,
tibber_connection=MagicMock(),
)
with patch("homeassistant.components.tibber.tibber.Tibber") as mock_client_cls:
with patch(
"homeassistant.components.tibber.tibber_data_api.TibberDataAPI"
) as mock_client_cls:
mock_client = MagicMock()
mock_client.set_access_token = MagicMock()
mock_client_cls.return_value = mock_client
client = await runtime.async_get_client(hass)
mock_client_cls.assert_called_once_with(
access_token="access-token", websession=ANY, time_zone=ANY, ssl=ANY
)
mock_client_cls.assert_called_once_with("access-token", websession=ANY)
session.async_ensure_token_valid.assert_awaited_once()
mock_client.set_access_token.assert_called_once_with("access-token")
assert client is mock_client
@@ -72,6 +73,7 @@ async def test_data_api_runtime_missing_token_raises(hass: HomeAssistant) -> Non
runtime = TibberRuntimeData(
session=session,
tibber_connection=MagicMock(),
)
with pytest.raises(ConfigEntryAuthFailed):

View File

@@ -22,6 +22,7 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client as client
from homeassistant.util import ssl as ssl_util
from homeassistant.util.color import RGBColor
from homeassistant.util.ssl import SSLCipherList
@@ -413,3 +414,29 @@ async def test_resolver_is_singleton(hass: HomeAssistant) -> None:
assert isinstance(session3._connector, aiohttp.TCPConnector)
assert session._connector._resolver is session2._connector._resolver
assert session._connector._resolver is session3._connector._resolver
async def test_connector_uses_http11_alpn(hass: HomeAssistant) -> None:
"""Test that connector uses HTTP/1.1 ALPN protocols."""
with patch.object(
ssl_util, "client_context", wraps=ssl_util.client_context
) as mock_client_context:
client.async_get_clientsession(hass)
# Verify client_context was called with HTTP/1.1 ALPN
mock_client_context.assert_called_once_with(
SSLCipherList.PYTHON_DEFAULT, ssl_util.SSL_ALPN_HTTP11
)
async def test_connector_no_verify_uses_http11_alpn(hass: HomeAssistant) -> None:
"""Test that connector without SSL verification uses HTTP/1.1 ALPN protocols."""
with patch.object(
ssl_util, "client_context_no_verify", wraps=ssl_util.client_context_no_verify
) as mock_client_context_no_verify:
client.async_get_clientsession(hass, verify_ssl=False)
# Verify client_context_no_verify was called with HTTP/1.1 ALPN
mock_client_context_no_verify.assert_called_once_with(
SSLCipherList.PYTHON_DEFAULT, ssl_util.SSL_ALPN_HTTP11
)

View File

@@ -8,6 +8,7 @@ import pytest
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE
from homeassistant.core import HomeAssistant
from homeassistant.helpers import httpx_client as client
from homeassistant.util.ssl import SSL_ALPN_HTTP11, SSL_ALPN_HTTP11_HTTP2
from tests.common import MockModule, extract_stack_to_frame, mock_integration
@@ -16,14 +17,20 @@ async def test_get_async_client_with_ssl(hass: HomeAssistant) -> None:
"""Test init async client with ssl."""
client.get_async_client(hass)
assert isinstance(hass.data[client.DATA_ASYNC_CLIENT], httpx.AsyncClient)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
httpx.AsyncClient,
)
async def test_get_async_client_without_ssl(hass: HomeAssistant) -> None:
"""Test init async client without ssl."""
client.get_async_client(hass, verify_ssl=False)
assert isinstance(hass.data[client.DATA_ASYNC_CLIENT_NOVERIFY], httpx.AsyncClient)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11)],
httpx.AsyncClient,
)
async def test_create_async_httpx_client_with_ssl_and_cookies(
@@ -34,7 +41,7 @@ async def test_create_async_httpx_client_with_ssl_and_cookies(
httpx_client = client.create_async_httpx_client(hass, cookies={"bla": True})
assert isinstance(httpx_client, httpx.AsyncClient)
assert hass.data[client.DATA_ASYNC_CLIENT] != httpx_client
assert hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)] != httpx_client
async def test_create_async_httpx_client_without_ssl_and_cookies(
@@ -47,31 +54,37 @@ async def test_create_async_httpx_client_without_ssl_and_cookies(
hass, verify_ssl=False, cookies={"bla": True}
)
assert isinstance(httpx_client, httpx.AsyncClient)
assert hass.data[client.DATA_ASYNC_CLIENT_NOVERIFY] != httpx_client
assert hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11)] != httpx_client
async def test_get_async_client_cleanup(hass: HomeAssistant) -> None:
"""Test init async client with ssl."""
client.get_async_client(hass)
assert isinstance(hass.data[client.DATA_ASYNC_CLIENT], httpx.AsyncClient)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
httpx.AsyncClient,
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE)
await hass.async_block_till_done()
assert hass.data[client.DATA_ASYNC_CLIENT].is_closed
assert hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)].is_closed
async def test_get_async_client_cleanup_without_ssl(hass: HomeAssistant) -> None:
"""Test init async client without ssl."""
client.get_async_client(hass, verify_ssl=False)
assert isinstance(hass.data[client.DATA_ASYNC_CLIENT_NOVERIFY], httpx.AsyncClient)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11)],
httpx.AsyncClient,
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE)
await hass.async_block_till_done()
assert hass.data[client.DATA_ASYNC_CLIENT_NOVERIFY].is_closed
assert hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11)].is_closed
async def test_get_async_client_patched_close(hass: HomeAssistant) -> None:
@@ -79,7 +92,10 @@ async def test_get_async_client_patched_close(hass: HomeAssistant) -> None:
with patch("httpx.AsyncClient.aclose") as mock_aclose:
httpx_session = client.get_async_client(hass)
assert isinstance(hass.data[client.DATA_ASYNC_CLIENT], httpx.AsyncClient)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
httpx.AsyncClient,
)
with pytest.raises(RuntimeError):
await httpx_session.aclose()
@@ -92,7 +108,10 @@ async def test_get_async_client_context_manager(hass: HomeAssistant) -> None:
with patch("httpx.AsyncClient.aclose") as mock_aclose:
httpx_session = client.get_async_client(hass)
assert isinstance(hass.data[client.DATA_ASYNC_CLIENT], httpx.AsyncClient)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
httpx.AsyncClient,
)
async with httpx_session:
pass
@@ -100,6 +119,80 @@ async def test_get_async_client_context_manager(hass: HomeAssistant) -> None:
assert mock_aclose.call_count == 0
async def test_get_async_client_http2(hass: HomeAssistant) -> None:
"""Test init async client with HTTP/2 support."""
http1_client = client.get_async_client(hass)
http2_client = client.get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
# HTTP/1.1 and HTTP/2 clients should be different (different SSL contexts)
assert http1_client is not http2_client
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
httpx.AsyncClient,
)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11_HTTP2)],
httpx.AsyncClient,
)
# Same parameters should return cached client
assert client.get_async_client(hass) is http1_client
assert (
client.get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
is http2_client
)
async def test_get_async_client_http2_cleanup(hass: HomeAssistant) -> None:
"""Test cleanup of HTTP/2 async client."""
client.get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11_HTTP2)],
httpx.AsyncClient,
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE)
await hass.async_block_till_done()
assert hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11_HTTP2)].is_closed
async def test_get_async_client_http2_without_ssl(hass: HomeAssistant) -> None:
"""Test init async client with HTTP/2 and without SSL."""
http2_client = client.get_async_client(
hass, verify_ssl=False, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
)
assert isinstance(
hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11_HTTP2)],
httpx.AsyncClient,
)
# Same parameters should return cached client
assert (
client.get_async_client(
hass, verify_ssl=False, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
)
is http2_client
)
async def test_create_async_httpx_client_http2(hass: HomeAssistant) -> None:
"""Test create async client with HTTP/2 uses correct ALPN protocols."""
http1_client = client.create_async_httpx_client(hass)
http2_client = client.create_async_httpx_client(
hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
)
# Different clients (not cached)
assert http1_client is not http2_client
# Both should be valid clients
assert isinstance(http1_client, httpx.AsyncClient)
assert isinstance(http2_client, httpx.AsyncClient)
async def test_warning_close_session_integration(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:

View File

@@ -4209,16 +4209,6 @@ async def test_referenced_labels(hass: HomeAssistant) -> None:
"data_template": {"label_id": "label_in_data_template"},
},
{"action": "test.script", "data": {"without": "label_id"}},
{
"condition": "light.is_on",
"target": {"label_id": "label_condition_target"},
},
{
"condition": "light.is_on",
"target": {
"label_id": ["label_condition_list_1", "label_condition_list_2"]
},
},
{
"choose": [
{
@@ -4231,10 +4221,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None:
],
},
{
"conditions": {
"condition": "light.is_on",
"target": {"label_id": "label_choice_2_cond"},
},
"conditions": "{{ true == false }}",
"sequence": [
{
"action": "test.script",
@@ -4253,10 +4240,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None:
{"event": "test_event"},
{"delay": "{{ delay_period }}"},
{
"if": {
"condition": "light.is_on",
"target": {"label_id": "label_if_cond"},
},
"if": [],
"then": [
{
"action": "test.script",
@@ -4293,22 +4277,17 @@ async def test_referenced_labels(hass: HomeAssistant) -> None:
)
assert script_obj.referenced_labels == {
"label_choice_1_seq",
"label_choice_2_cond",
"label_choice_2_seq",
"label_condition_list_1",
"label_condition_list_2",
"label_condition_target",
"label_default_seq",
"label_if_cond",
"label_if_else",
"label_if_then",
"label_in_data_template",
"label_in_target",
"label_parallel",
"label_sequence",
"label_service_list_1",
"label_service_list_2",
"label_service_not_list",
"label_if_then",
"label_if_else",
"label_parallel",
"label_sequence",
}
# Test we cache results.
assert script_obj.referenced_labels is script_obj.referenced_labels
@@ -4341,16 +4320,6 @@ async def test_referenced_floors(hass: HomeAssistant) -> None:
"data_template": {"floor_id": "floor_in_data_template"},
},
{"action": "test.script", "data": {"without": "floor_id"}},
{
"condition": "light.is_on",
"target": {"floor_id": "floor_condition_target"},
},
{
"condition": "light.is_on",
"target": {
"floor_id": ["floor_condition_list_1", "floor_condition_list_2"]
},
},
{
"choose": [
{
@@ -4363,10 +4332,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None:
],
},
{
"conditions": {
"condition": "light.is_on",
"target": {"floor_id": "floor_choice_2_cond"},
},
"conditions": "{{ true == false }}",
"sequence": [
{
"action": "test.script",
@@ -4385,10 +4351,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None:
{"event": "test_event"},
{"delay": "{{ delay_period }}"},
{
"if": {
"condition": "light.is_on",
"target": {"floor_id": "floor_if_cond"},
},
"if": [],
"then": [
{
"action": "test.script",
@@ -4425,21 +4388,16 @@ async def test_referenced_floors(hass: HomeAssistant) -> None:
)
assert script_obj.referenced_floors == {
"floor_choice_1_seq",
"floor_choice_2_cond",
"floor_choice_2_seq",
"floor_condition_list_1",
"floor_condition_list_2",
"floor_condition_target",
"floor_default_seq",
"floor_if_cond",
"floor_if_else",
"floor_if_then",
"floor_in_data_template",
"floor_in_target",
"floor_parallel",
"floor_sequence",
"floor_service_list",
"floor_service_not_list",
"floor_if_then",
"floor_if_else",
"floor_parallel",
"floor_sequence",
}
# Test we cache results.
assert script_obj.referenced_floors is script_obj.referenced_floors
@@ -4472,16 +4430,6 @@ async def test_referenced_areas(hass: HomeAssistant) -> None:
"data_template": {"area_id": "area_in_data_template"},
},
{"action": "test.script", "data": {"without": "area_id"}},
{
"condition": "light.is_on",
"target": {"area_id": "area_condition_target"},
},
{
"condition": "light.is_on",
"target": {
"area_id": ["area_condition_list_1", "area_condition_list_2"]
},
},
{
"choose": [
{
@@ -4494,10 +4442,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None:
],
},
{
"conditions": {
"condition": "light.is_on",
"target": {"area_id": "area_choice_2_cond"},
},
"conditions": "{{ true == false }}",
"sequence": [
{
"action": "test.script",
@@ -4516,10 +4461,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None:
{"event": "test_event"},
{"delay": "{{ delay_period }}"},
{
"if": {
"condition": "light.is_on",
"target": {"area_id": "area_if_cond"},
},
"if": [],
"then": [
{
"action": "test.script",
@@ -4556,21 +4498,16 @@ async def test_referenced_areas(hass: HomeAssistant) -> None:
)
assert script_obj.referenced_areas == {
"area_choice_1_seq",
"area_choice_2_cond",
"area_choice_2_seq",
"area_condition_list_1",
"area_condition_list_2",
"area_condition_target",
"area_default_seq",
"area_if_cond",
"area_if_else",
"area_if_then",
"area_in_data_template",
"area_in_target",
"area_parallel",
"area_sequence",
"area_service_list",
"area_service_not_list",
"area_if_then",
"area_if_else",
"area_parallel",
"area_sequence",
# 'area_service_template', # no area extraction from template
}
# Test we cache results.
@@ -4671,19 +4608,6 @@ async def test_referenced_entities(hass: HomeAssistant) -> None:
}
],
},
{
"condition": "light.is_on",
"target": {"entity_id": "light.condition_target"},
},
{
"condition": "light.is_on",
"target": {
"entity_id": [
"light.condition_list_1",
"light.condition_list_2",
]
},
},
{
"sequence": [
{
@@ -4702,9 +4626,6 @@ async def test_referenced_entities(hass: HomeAssistant) -> None:
"light.choice_1_seq",
"light.choice_2_cond",
"light.choice_2_seq",
"light.condition_list_1",
"light.condition_list_2",
"light.condition_target",
"light.default_seq",
"light.direct_entity_referenced",
"light.entity_in_data_template",
@@ -4735,19 +4656,6 @@ async def test_referenced_devices(hass: HomeAssistant) -> None:
"device_id": "condition-dev-id",
"domain": "switch",
},
{
"condition": "light.is_on",
"target": {"device_id": "condition-target-dev-id"},
},
{
"condition": "light.is_on",
"target": {
"device_id": [
"condition-target-list-1",
"condition-target-list-2",
]
},
},
{
"action": "test.script",
"data": {"device_id": "data-string-id"},
@@ -4845,9 +4753,6 @@ async def test_referenced_devices(hass: HomeAssistant) -> None:
"choice-2-cond-dev-id",
"choice-2-seq-device-target",
"condition-dev-id",
"condition-target-dev-id",
"condition-target-list-1",
"condition-target-list-2",
"data-string-id",
"data-template-string-id",
"default-device-target",

View File

@@ -1,78 +1,58 @@
"""Test Home Assistant ssl utility functions."""
from unittest.mock import MagicMock, Mock, patch
import pytest
from homeassistant.util.ssl import (
SSL_ALPN_HTTP11,
SSL_ALPN_HTTP11_HTTP2,
SSL_ALPN_NONE,
SSLCipherList,
client_context,
client_context_no_verify,
create_client_context,
create_no_verify_ssl_context,
get_default_context,
get_default_no_verify_context,
)
@pytest.fixture
def mock_sslcontext():
"""Mock the ssl lib."""
return MagicMock(set_ciphers=Mock(return_value=True))
def test_client_context(mock_sslcontext) -> None:
"""Test client context."""
with patch("homeassistant.util.ssl.ssl.SSLContext", return_value=mock_sslcontext):
client_context()
mock_sslcontext.set_ciphers.assert_not_called()
client_context(SSLCipherList.MODERN)
mock_sslcontext.set_ciphers.assert_not_called()
client_context(SSLCipherList.INTERMEDIATE)
mock_sslcontext.set_ciphers.assert_not_called()
client_context(SSLCipherList.INSECURE)
mock_sslcontext.set_ciphers.assert_not_called()
def test_no_verify_ssl_context(mock_sslcontext) -> None:
"""Test no verify ssl context."""
with patch("homeassistant.util.ssl.ssl.SSLContext", return_value=mock_sslcontext):
create_no_verify_ssl_context()
mock_sslcontext.set_ciphers.assert_not_called()
create_no_verify_ssl_context(SSLCipherList.MODERN)
mock_sslcontext.set_ciphers.assert_not_called()
create_no_verify_ssl_context(SSLCipherList.INTERMEDIATE)
mock_sslcontext.set_ciphers.assert_not_called()
create_no_verify_ssl_context(SSLCipherList.INSECURE)
mock_sslcontext.set_ciphers.assert_not_called()
def test_ssl_context_caching() -> None:
"""Test that SSLContext instances are cached correctly."""
assert client_context() is client_context(SSLCipherList.PYTHON_DEFAULT)
assert create_no_verify_ssl_context() is create_no_verify_ssl_context(
SSLCipherList.PYTHON_DEFAULT
)
def test_create_client_context(mock_sslcontext) -> None:
"""Test create client context."""
with patch("homeassistant.util.ssl.ssl.SSLContext", return_value=mock_sslcontext):
client_context()
mock_sslcontext.set_ciphers.assert_not_called()
def test_ssl_context_cipher_bucketing() -> None:
"""Test that SSL contexts are bucketed by cipher list."""
default_ctx = client_context(SSLCipherList.PYTHON_DEFAULT)
modern_ctx = client_context(SSLCipherList.MODERN)
intermediate_ctx = client_context(SSLCipherList.INTERMEDIATE)
insecure_ctx = client_context(SSLCipherList.INSECURE)
client_context(SSLCipherList.MODERN)
mock_sslcontext.set_ciphers.assert_not_called()
# Different cipher lists should return different contexts
assert default_ctx is not modern_ctx
assert default_ctx is not intermediate_ctx
assert default_ctx is not insecure_ctx
assert modern_ctx is not intermediate_ctx
assert modern_ctx is not insecure_ctx
assert intermediate_ctx is not insecure_ctx
client_context(SSLCipherList.INTERMEDIATE)
mock_sslcontext.set_ciphers.assert_not_called()
# Same parameters should return cached context
assert client_context(SSLCipherList.PYTHON_DEFAULT) is default_ctx
assert client_context(SSLCipherList.MODERN) is modern_ctx
client_context(SSLCipherList.INSECURE)
mock_sslcontext.set_ciphers.assert_not_called()
def test_no_verify_ssl_context_cipher_bucketing() -> None:
    """Verify no-verify SSL contexts are cached per cipher list."""
    by_cipher = {
        cipher: create_no_verify_ssl_context(cipher)
        for cipher in (SSLCipherList.PYTHON_DEFAULT, SSLCipherList.MODERN)
    }
    # Distinct cipher lists must not share a context object.
    assert (
        by_cipher[SSLCipherList.PYTHON_DEFAULT] is not by_cipher[SSLCipherList.MODERN]
    )
    # Repeating a request must hit the cache and return the same object.
    for cipher, ctx in by_cipher.items():
        assert create_no_verify_ssl_context(cipher) is ctx
def test_create_client_context_independent() -> None:
@@ -82,3 +62,129 @@ def test_create_client_context_independent() -> None:
independent_context_2 = create_client_context()
assert shared_context is not independent_context_1
assert independent_context_1 is not independent_context_2
def test_ssl_context_alpn_bucketing() -> None:
    """Test that SSL contexts are bucketed by ALPN protocols.

    Different ALPN protocol configurations should return different cached
    contexts to prevent downstream libraries (e.g., httpx/httpcore) from
    mutating shared contexts with incompatible settings.
    """
    alpn_variants = (SSL_ALPN_HTTP11, SSL_ALPN_HTTP11_HTTP2, SSL_ALPN_NONE)

    # Verifying contexts: one distinct object per ALPN configuration.
    verify_ctxs = [
        client_context(SSLCipherList.PYTHON_DEFAULT, alpn) for alpn in alpn_variants
    ]
    assert len({id(ctx) for ctx in verify_ctxs}) == len(alpn_variants)
    # Repeated requests must return the cached object.
    for alpn, ctx in zip(alpn_variants, verify_ctxs):
        assert client_context(SSLCipherList.PYTHON_DEFAULT, alpn) is ctx

    # No-verify contexts are bucketed by ALPN the same way.
    no_verify_ctxs = [
        client_context_no_verify(SSLCipherList.PYTHON_DEFAULT, alpn)
        for alpn in alpn_variants
    ]
    assert len({id(ctx) for ctx in no_verify_ctxs}) == len(alpn_variants)
    # create_no_verify_ssl_context must resolve to the same cache buckets.
    for alpn, ctx in zip(alpn_variants, no_verify_ctxs):
        assert create_no_verify_ssl_context(SSLCipherList.PYTHON_DEFAULT, alpn) is ctx
def test_ssl_context_insecure_alpn_bucketing() -> None:
    """Test that INSECURE cipher list SSL contexts are bucketed by ALPN protocols.

    INSECURE cipher list is used by some integrations that need to connect to
    devices with outdated TLS implementations.
    """
    http1 = client_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11)
    http2 = client_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11_HTTP2)
    no_alpn = client_context(SSLCipherList.INSECURE, SSL_ALPN_NONE)

    # All three ALPN buckets must hold distinct context objects.
    assert http1 is not http2
    assert http1 is not no_alpn
    assert http2 is not no_alpn
    # Cache hits: identical parameters return the identical object.
    assert client_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11) is http1
    assert client_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11_HTTP2) is http2
    assert client_context(SSLCipherList.INSECURE, SSL_ALPN_NONE) is no_alpn

    nv_http1 = client_context_no_verify(SSLCipherList.INSECURE, SSL_ALPN_HTTP11)
    nv_http2 = client_context_no_verify(SSLCipherList.INSECURE, SSL_ALPN_HTTP11_HTTP2)
    nv_none = client_context_no_verify(SSLCipherList.INSECURE, SSL_ALPN_NONE)
    assert nv_http1 is not nv_http2
    assert nv_http1 is not nv_none
    assert nv_http2 is not nv_none
    # create_no_verify_ssl_context must resolve to the same cached objects.
    assert (
        create_no_verify_ssl_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11)
        is nv_http1
    )
    assert (
        create_no_verify_ssl_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11_HTTP2)
        is nv_http2
    )
    assert (
        create_no_verify_ssl_context(SSLCipherList.INSECURE, SSL_ALPN_NONE)
        is nv_none
    )
def test_get_default_context_uses_http1_alpn() -> None:
    """Verify the default-context helpers resolve to the HTTP/1.1 ALPN bucket."""
    expected_verify = client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)
    expected_no_verify = client_context_no_verify(
        SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11
    )
    # get_default_context()/get_default_no_verify_context() must share the
    # cached HTTP/1.1 contexts rather than creating new ones.
    assert get_default_context() is expected_verify
    assert get_default_no_verify_context() is expected_no_verify
def test_client_context_default_no_alpn() -> None:
    """Test that client_context defaults to no ALPN for backward compatibility."""
    implicit = client_context()
    # The implicit default lands in the SSL_ALPN_NONE bucket...
    assert implicit is client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_NONE)
    # ...which is a different object from the HTTP/1.1 bucket.
    assert implicit is not client_context(
        SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11
    )

View File

@@ -56,8 +56,6 @@ from homeassistant.util.unit_conversion import (
InformationConverter,
MassConverter,
MassVolumeConcentrationConverter,
NitrogenDioxideConcentrationConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
ReactiveEnergyConverter,
@@ -94,7 +92,6 @@ _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = {
InformationConverter,
MassConverter,
ApparentPowerConverter,
OzoneConcentrationConverter,
PowerConverter,
PressureConverter,
ReactiveEnergyConverter,
@@ -106,7 +103,6 @@ _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = {
EnergyDistanceConverter,
VolumeConverter,
VolumeFlowRateConverter,
NitrogenDioxideConcentrationConverter,
SulphurDioxideConcentrationConverter,
)
}
@@ -164,16 +160,6 @@ _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, flo
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
1000,
),
NitrogenDioxideConcentrationConverter: (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
1.912503,
),
OzoneConcentrationConverter: (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
1.995417,
),
PowerConverter: (UnitOfPower.WATT, UnitOfPower.KILO_WATT, 1000),
PressureConverter: (UnitOfPressure.HPA, UnitOfPressure.INHG, 33.86389),
ReactiveEnergyConverter: (
@@ -393,20 +379,6 @@ _CONVERTED_VALUE: dict[
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
),
],
NitrogenDioxideConcentrationConverter: [
(
1,
CONCENTRATION_PARTS_PER_BILLION,
1.912503,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
),
(
120,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
62.744976,
CONCENTRATION_PARTS_PER_BILLION,
),
],
ConductivityConverter: [
(
5,
@@ -742,20 +714,6 @@ _CONVERTED_VALUE: dict[
(1, UnitOfMass.STONES, 14, UnitOfMass.POUNDS),
(1, UnitOfMass.STONES, 224, UnitOfMass.OUNCES),
],
OzoneConcentrationConverter: [
(
1,
CONCENTRATION_PARTS_PER_BILLION,
1.995417,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
),
(
120,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
60.1378,
CONCENTRATION_PARTS_PER_BILLION,
),
],
PowerConverter: [
(10, UnitOfPower.KILO_WATT, 10000, UnitOfPower.WATT),
(10, UnitOfPower.MEGA_WATT, 10e6, UnitOfPower.WATT),