Mirror of https://github.com/home-assistant/core.git (synced 2026-01-20 22:46:58 +01:00)

Compare commits: add_switch ... ssl_contex (43 commits)
Commits in this comparison (SHA1):
d589b9eb8d, 256d47775b, c9eae821e8, 7c4cdd57b6, 8c02268638, 8436676e67, df10ffd508, 02218fab7b,
6af5698645, 75db2cde40, 329dd05434, 53c53d03e0, 360b394d03, a663d55632, 3fd266a513, 442c1d6242,
0e2aae02f6, 3227a6e49f, 9d0cfb628b, 4578fe0260, 0d92708108, cceb50071b, 62f296c9dd, ea1f280494,
67108a2fc8, 1ccbd5124e, 818af90a7b, 23bc78fa25, 0b1cc7638f, c291a2fbc1, 7379a4ff4b, ddcf5cb749,
4b10a542b0, beea9fa74b, ce8fd16456, 2172d15489, 0cfa0ed670, f6839913d8, a7cfac2618, 8fa01497ee,
e077c65a77, 7c49656fa8, 1730479c8d
@@ -128,7 +128,6 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
    "fan",
    "light",
    "siren",
    "switch",
}

_EXPERIMENTAL_TRIGGER_PLATFORMS = {

@@ -603,6 +602,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return a set of referenced labels."""
        referenced = self.action_script.referenced_labels

        if self._cond_func is not None:
            for conf in self._cond_func.config:
                referenced |= condition.async_extract_labels(conf)

        for conf in self._trigger_config:
            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
        return referenced

@@ -612,6 +615,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return a set of referenced floors."""
        referenced = self.action_script.referenced_floors

        if self._cond_func is not None:
            for conf in self._cond_func.config:
                referenced |= condition.async_extract_floors(conf)

        for conf in self._trigger_config:
            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
        return referenced

@@ -621,6 +628,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return a set of referenced areas."""
        referenced = self.action_script.referenced_areas

        if self._cond_func is not None:
            for conf in self._cond_func.config:
                referenced |= condition.async_extract_areas(conf)

        for conf in self._trigger_config:
            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
        return referenced
@@ -28,6 +28,7 @@ from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2

from .const import DOMAIN, UPDATE_INTERVAL
from .entity import AqualinkEntity

@@ -66,7 +67,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AqualinkConfigEntry) ->
    username = entry.data[CONF_USERNAME]
    password = entry.data[CONF_PASSWORD]

    aqualink = AqualinkClient(username, password, httpx_client=get_async_client(hass))
    aqualink = AqualinkClient(
        username,
        password,
        httpx_client=get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2),
    )
    try:
        await aqualink.login()
    except AqualinkServiceException as login_exception:

@@ -15,6 +15,7 @@ import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2

from .const import DOMAIN

@@ -36,7 +37,11 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN):

        try:
            async with AqualinkClient(
                username, password, httpx_client=get_async_client(self.hass)
                username,
                password,
                httpx_client=get_async_client(
                    self.hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
                ),
            ):
                pass
        except AqualinkServiceUnauthorizedException:
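The two Aqualink hunks above switch the integration from the default HTTP/1.1 shared httpx client to one that also offers HTTP/2 via ALPN. A minimal sketch of the same call pattern from any integration follows; the helper and constant are the ones introduced in this changeset, while the URL is a placeholder.

```python
from homeassistant.core import HomeAssistant
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2


async def async_probe_api(hass: HomeAssistant) -> str:
    """Fetch a resource over the shared HTTP/2-capable httpx client."""
    # The client is cached per (verify_ssl, alpn_protocols), so requesting
    # HTTP/2 here does not change the client handed to other integrations.
    client = get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
    response = await client.get("https://example.invalid/api/status")  # placeholder URL
    return response.http_version  # "HTTP/2" when the server negotiated h2
```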
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["kostal"],
  "requirements": ["pykoplenti==1.3.0"]
  "requirements": ["pykoplenti==1.5.0"]
}
@@ -1,24 +1,47 @@
"""Provides triggers for lights."""

from typing import Any

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
    EntityNumericalStateAttributeChangedTriggerBase,
    EntityNumericalStateAttributeCrossedThresholdTriggerBase,
    Trigger,
    make_entity_numerical_state_attribute_changed_trigger,
    make_entity_numerical_state_attribute_crossed_threshold_trigger,
    make_entity_target_state_trigger,
)

from . import ATTR_BRIGHTNESS
from .const import DOMAIN


def _convert_uint8_to_percentage(value: Any) -> float:
    """Convert a uint8 value (0-255) to a percentage (0-100)."""
    return (float(value) / 255.0) * 100.0


class BrightnessChangedTrigger(EntityNumericalStateAttributeChangedTriggerBase):
    """Trigger for brightness changed."""

    _domain = DOMAIN
    _attribute = ATTR_BRIGHTNESS
    _converter = staticmethod(_convert_uint8_to_percentage)


class BrightnessCrossedThresholdTrigger(
    EntityNumericalStateAttributeCrossedThresholdTriggerBase
):
    """Trigger for brightness crossed threshold."""

    _domain = DOMAIN
    _attribute = ATTR_BRIGHTNESS
    _converter = staticmethod(_convert_uint8_to_percentage)


TRIGGERS: dict[str, type[Trigger]] = {
    "brightness_changed": make_entity_numerical_state_attribute_changed_trigger(
        DOMAIN, ATTR_BRIGHTNESS
    ),
    "brightness_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
        DOMAIN, ATTR_BRIGHTNESS
    ),
    "brightness_changed": BrightnessChangedTrigger,
    "brightness_crossed_threshold": BrightnessCrossedThresholdTrigger,
    "turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
    "turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
}
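The brightness triggers compare thresholds in percent while the light's brightness attribute is stored as uint8, so `_convert_uint8_to_percentage` rescales the attribute before the comparison runs. A standalone sketch of that conversion:

```python
def convert_uint8_to_percentage(value: float) -> float:
    """Convert a uint8 brightness (0-255) to a percentage (0-100)."""
    return (float(value) / 255.0) * 100.0


# A raw brightness of 128 is about 50.2 %, so an "above: 50" trigger option
# configured in percent matches a state attribute of 128.
assert round(convert_uint8_to_percentage(128), 1) == 50.2
assert convert_uint8_to_percentage(0) == 0.0
assert convert_uint8_to_percentage(255) == 100.0
```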
@@ -22,7 +22,10 @@
|
||||
number:
|
||||
selector:
|
||||
number:
|
||||
max: 100
|
||||
min: 0
|
||||
mode: box
|
||||
unit_of_measurement: "%"
|
||||
entity:
|
||||
selector:
|
||||
entity:
|
||||
|
||||
@@ -8,6 +8,9 @@
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
|
||||
@@ -178,6 +178,7 @@ class OneDriveBackupAgent(BackupAgent):
|
||||
file,
|
||||
upload_chunk_size=upload_chunk_size,
|
||||
session=async_get_clientsession(self._hass),
|
||||
smart_chunk_size=True,
|
||||
)
|
||||
except HashMismatchError as err:
|
||||
raise BackupAgentError(
|
||||
|
||||
@@ -10,5 +10,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["onedrive_personal_sdk"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["onedrive-personal-sdk==0.1.0"]
|
||||
"requirements": ["onedrive-personal-sdk==0.1.1"]
|
||||
}
|
||||
|
||||
@@ -13,6 +13,9 @@
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/qnap_qsw",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioqsw"],
|
||||
"requirements": ["aioqsw==0.4.2"]
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
"codeowners": ["@rabbit-air"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rabbitair",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["python-rabbitair==0.0.8"],
|
||||
"zeroconf": ["_rabbitair._udp.local."]
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/radiotherm",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["radiotherm"],
|
||||
"requirements": ["radiotherm==2.1.0"]
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["usb"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/rainforest_raven",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["aioraven==0.7.1"],
|
||||
"usb": [
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/rapt_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["rapt-ble==0.1.2"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@ashionky"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/refoss",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["refoss-ha==1.2.5"],
|
||||
"single_config_entry": true
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/rehlko",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aiokem"],
|
||||
"quality_scale": "silver",
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@jimmyd-be"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/renson",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["renson-endura-delta==1.7.2"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@danielhiversen", "@elupus", "@RobBie1221"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rfxtrx",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["RFXtrx"],
|
||||
"requirements": ["pyRFXtrx==0.31.1"]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@milanmeu", "@frenck", "@quebulm"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rituals_perfume_genie",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyrituals"],
|
||||
"requirements": ["pyrituals==0.0.7"]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@xeniter"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/romy",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["romy==0.0.10"],
|
||||
"zeroconf": ["_aicu-http._tcp.local."]
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/roomba",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["paho_mqtt", "roombapy"],
|
||||
"requirements": ["roombapy==1.9.0"],
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@pavoni"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/roon",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["roonapi"],
|
||||
"requirements": ["roonapi==0.1.6"]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rova",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["rova"],
|
||||
"requirements": ["rova==0.4.1"]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@noahhusby"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/russound_rio",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiorussound"],
|
||||
"quality_scale": "silver",
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/ruuvi_gateway",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["aioruuvigateway==0.1.0"]
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/ruuvitag_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["ruuvitag-ble==0.4.0"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@OnFreund", "@elad-bar", "@maorcc"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rympro",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["pyrympro==0.0.9"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@shaiu", "@jpbede"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/sabnzbd",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pysabnzbd"],
|
||||
"quality_scale": "bronze",
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@dknowles2"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/schlage",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["pyschlage==2025.9.0"]
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/sense",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["sense_energy"],
|
||||
"requirements": ["sense-energy==0.13.8"]
|
||||
|
||||
@@ -18,6 +18,9 @@
|
||||
"error": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"data": {
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
"""Provides conditions for switches."""
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.condition import Condition, make_entity_state_condition
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_off": make_entity_state_condition(DOMAIN, STATE_OFF),
|
||||
"is_on": make_entity_state_condition(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
"""Return the switch conditions."""
|
||||
return CONDITIONS
|
||||
@@ -1,17 +0,0 @@
|
||||
.condition_common: &condition_common
|
||||
target:
|
||||
entity:
|
||||
domain: switch
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
|
||||
is_off: *condition_common
|
||||
is_on: *condition_common
|
||||
@@ -1,12 +1,4 @@
|
||||
{
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"condition": "mdi:toggle-switch-variant-off"
|
||||
},
|
||||
"is_on": {
|
||||
"condition": "mdi:toggle-switch-variant"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"default": "mdi:toggle-switch-variant",
|
||||
|
||||
@@ -1,32 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"condition_behavior_description": "How the state should match on the targeted switches.",
|
||||
"condition_behavior_name": "Behavior",
|
||||
"trigger_behavior_description": "The behavior of the targeted switches to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"description": "Tests if one or more switches are off.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::switch::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::switch::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "If a switch is off"
|
||||
},
|
||||
"is_on": {
|
||||
"description": "Tests if one or more switches are on.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::switch::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::switch::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "If a switch is on"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"toggle": "[%key:common::device_automation::action_type::toggle%]",
|
||||
@@ -70,12 +46,6 @@
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"wrong_account": "Wrong account: Please authenticate with {username}."
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"description": "The Twitch integration needs to re-authenticate your account",
|
||||
|
||||
@@ -21,6 +21,9 @@
|
||||
"error": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"oauth_discovery": {
|
||||
"description": "Home Assistant has found a Withings device on your network. Be aware that the setup of Withings is more complicated than many other integrations. Press **Submit** to continue setting up Withings."
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"oauth_discovery": {
|
||||
"description": "Home Assistant has found an Xbox device on your network. Press **Submit** to continue setting up the Xbox integration.",
|
||||
|
||||
@@ -17,6 +17,9 @@
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"channels": {
|
||||
"data": { "channels": "YouTube channels" },
|
||||
|
||||
@@ -5375,7 +5375,7 @@
|
||||
"name": "QNAP"
|
||||
},
|
||||
"qnap_qsw": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "QNAP QSW"
|
||||
@@ -5413,7 +5413,7 @@
|
||||
},
|
||||
"rabbitair": {
|
||||
"name": "Rabbit Air",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -5438,7 +5438,7 @@
|
||||
},
|
||||
"radiotherm": {
|
||||
"name": "Radio Thermostat",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -5473,7 +5473,7 @@
|
||||
},
|
||||
"rapt_ble": {
|
||||
"name": "RAPT Bluetooth",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -5571,7 +5571,7 @@
|
||||
},
|
||||
"renson": {
|
||||
"name": "Renson",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -5679,13 +5679,13 @@
|
||||
},
|
||||
"romy": {
|
||||
"name": "ROMY Vacuum Cleaner",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
"roomba": {
|
||||
"name": "iRobot Roomba and Braava",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -5720,7 +5720,7 @@
|
||||
},
|
||||
"rova": {
|
||||
"name": "ROVA",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
@@ -5763,13 +5763,13 @@
|
||||
"name": "Ruuvi",
|
||||
"integrations": {
|
||||
"ruuvi_gateway": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Ruuvi Gateway"
|
||||
},
|
||||
"ruuvitag_ble": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"name": "Ruuvi BLE"
|
||||
@@ -5784,7 +5784,7 @@
|
||||
},
|
||||
"sabnzbd": {
|
||||
"name": "SABnzbd",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
|
||||
@@ -370,9 +370,13 @@ def _async_get_connector(
        return connectors[connector_key]

    if verify_ssl:
        ssl_context: SSLContext = ssl_util.client_context(ssl_cipher)
        ssl_context: SSLContext = ssl_util.client_context(
            ssl_cipher, ssl_util.SSL_ALPN_HTTP11
        )
    else:
        ssl_context = ssl_util.client_context_no_verify(ssl_cipher)
        ssl_context = ssl_util.client_context_no_verify(
            ssl_cipher, ssl_util.SSL_ALPN_HTTP11
        )

    connector = HomeAssistantTCPConnector(
        family=family,
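aiohttp has no HTTP/2 support, so the connector above now asks the ssl utility for a context whose ALPN list is pinned to HTTP/1.1. A simplified stand-in for that branch, using the standard library directly rather than `ssl_util`, is sketched below.

```python
import ssl


def build_connector_context(verify_ssl: bool) -> ssl.SSLContext:
    """Build an HTTP/1.1-only client context, mirroring the branch above."""
    context = ssl.create_default_context()
    if not verify_ssl:
        # Equivalent in spirit to client_context_no_verify().
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
    # Pin ALPN so nothing downstream advertises h2 over an aiohttp connection.
    context.set_alpn_protocols(["http/1.1"])
    return context
```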
@@ -17,6 +17,7 @@ from typing import (
    TYPE_CHECKING,
    Any,
    Final,
    Literal,
    Protocol,
    TypedDict,
    Unpack,

@@ -28,7 +29,10 @@ from typing import (
import voluptuous as vol

from homeassistant.const import (
    ATTR_AREA_ID,
    ATTR_DEVICE_CLASS,
    ATTR_FLOOR_ID,
    ATTR_LABEL_ID,
    CONF_ABOVE,
    CONF_AFTER,
    CONF_ATTRIBUTE,

@@ -1346,13 +1350,18 @@ def async_extract_entities(config: ConfigType | Template) -> set[str]:
    if entity_ids is not None:
        referenced.update(entity_ids)

    if target_entities := _get_targets_from_condition_config(
        config, CONF_ENTITY_ID
    ):
        referenced.update(target_entities)

    return referenced


@callback
def async_extract_devices(config: ConfigType | Template) -> set[str]:
    """Extract devices from a condition."""
    referenced = set()
    referenced: set[str] = set()
    to_process = deque([config])

    while to_process:

@@ -1366,15 +1375,75 @@ def async_extract_devices(config: ConfigType | Template) -> set[str]:
            to_process.extend(config["conditions"])
            continue

        if condition != "device":
        if condition == "device":
            if (device_id := config.get(CONF_DEVICE_ID)) is not None:
                referenced.add(device_id)
            continue

        if (device_id := config.get(CONF_DEVICE_ID)) is not None:
            referenced.add(device_id)
        if target_devices := _get_targets_from_condition_config(config, CONF_DEVICE_ID):
            referenced.update(target_devices)

    return referenced


@callback
def async_extract_areas(config: ConfigType | Template) -> set[str]:
    """Extract areas from a condition."""
    return _async_extract_targets(config, ATTR_AREA_ID)


@callback
def async_extract_floors(config: ConfigType | Template) -> set[str]:
    """Extract floors from a condition."""
    return _async_extract_targets(config, ATTR_FLOOR_ID)


@callback
def async_extract_labels(config: ConfigType | Template) -> set[str]:
    """Extract labels from a condition."""
    return _async_extract_targets(config, ATTR_LABEL_ID)


@callback
def _async_extract_targets(
    config: ConfigType | Template,
    target_type: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
) -> set[str]:
    """Extract targets from a condition."""
    referenced: set[str] = set()
    to_process = deque([config])

    while to_process:
        config = to_process.popleft()
        if isinstance(config, Template):
            continue

        condition = config[CONF_CONDITION]

        if condition in ("and", "not", "or"):
            to_process.extend(config["conditions"])
            continue

        if targets := _get_targets_from_condition_config(config, target_type):
            referenced.update(targets)

    return referenced


@callback
def _get_targets_from_condition_config(
    config: ConfigType,
    target: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
) -> list[str]:
    """Extract targets from a condition target config."""
    if not (target_conf := config.get(CONF_TARGET)):
        return []
    if not (targets := target_conf.get(target)):
        return []

    return [targets] if isinstance(targets, str) else targets


def _load_conditions_file(integration: Integration) -> dict[str, Any]:
    """Load conditions file for an integration."""
    try:
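`_async_extract_targets` walks nested `and`/`or`/`not` conditions and collects any IDs of the requested target type, with `_get_targets_from_condition_config` normalising a single string into a one-element list. The same walk over plain dicts, outside Home Assistant, looks like this:

```python
from collections import deque
from typing import Any


def extract_targets(config: dict[str, Any], target_type: str) -> set[str]:
    """Collect target IDs (e.g. "area_id") from a nested condition config."""
    referenced: set[str] = set()
    to_process = deque([config])
    while to_process:
        conf = to_process.popleft()
        if conf.get("condition") in ("and", "not", "or"):
            # Recurse into nested condition blocks.
            to_process.extend(conf.get("conditions", []))
            continue
        targets = conf.get("target", {}).get(target_type)
        if not targets:
            continue
        # A single string is treated as a one-element list.
        referenced.update([targets] if isinstance(targets, str) else targets)
    return referenced


example = {
    "condition": "or",
    "conditions": [
        {"condition": "light.is_on", "target": {"area_id": "kitchen"}},
        {"condition": "light.is_on", "target": {"area_id": ["hall", "porch"]}},
    ],
}
assert extract_targets(example, "area_id") == {"kitchen", "hall", "porch"}
```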
@@ -17,6 +17,9 @@ from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.loader import bind_hass
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.ssl import (
    SSL_ALPN_HTTP11,
    SSL_ALPN_HTTP11_HTTP2,
    SSLALPNProtocols,
    SSLCipherList,
    client_context,
    create_no_verify_ssl_context,

@@ -28,9 +31,9 @@ from .frame import warn_use
# and we want to keep the connection open for a while so we
# don't have to reconnect every time so we use 15s to match aiohttp.
KEEP_ALIVE_TIMEOUT = 15
DATA_ASYNC_CLIENT: HassKey[httpx.AsyncClient] = HassKey("httpx_async_client")
DATA_ASYNC_CLIENT_NOVERIFY: HassKey[httpx.AsyncClient] = HassKey(
    "httpx_async_client_noverify"
# Shared httpx clients keyed by (verify_ssl, alpn_protocols)
DATA_ASYNC_CLIENT: HassKey[dict[tuple[bool, SSLALPNProtocols], httpx.AsyncClient]] = (
    HassKey("httpx_async_client")
)
DEFAULT_LIMITS = limits = httpx.Limits(keepalive_expiry=KEEP_ALIVE_TIMEOUT)
SERVER_SOFTWARE = (

@@ -42,15 +45,26 @@ USER_AGENT = "User-Agent"

@callback
@bind_hass
def get_async_client(hass: HomeAssistant, verify_ssl: bool = True) -> httpx.AsyncClient:
def get_async_client(
    hass: HomeAssistant,
    verify_ssl: bool = True,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
) -> httpx.AsyncClient:
    """Return default httpx AsyncClient.

    This method must be run in the event loop.
    """
    key = DATA_ASYNC_CLIENT if verify_ssl else DATA_ASYNC_CLIENT_NOVERIFY

    if (client := hass.data.get(key)) is None:
        client = hass.data[key] = create_async_httpx_client(hass, verify_ssl)
    Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 to get a client configured for HTTP/2.
    Clients are cached separately by ALPN protocol to ensure proper SSL context
    configuration (ALPN protocols differ between HTTP versions).
    """
    client_key = (verify_ssl, alpn_protocols)
    clients = hass.data.setdefault(DATA_ASYNC_CLIENT, {})

    if (client := clients.get(client_key)) is None:
        client = clients[client_key] = create_async_httpx_client(
            hass, verify_ssl, alpn_protocols=alpn_protocols
        )

    return client

@@ -77,6 +91,7 @@ def create_async_httpx_client(
    verify_ssl: bool = True,
    auto_cleanup: bool = True,
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
    **kwargs: Any,
) -> httpx.AsyncClient:
    """Create a new httpx.AsyncClient with kwargs, i.e. for cookies.

@@ -84,13 +99,22 @@ def create_async_httpx_client(
    If auto_cleanup is False, the client will be
    automatically closed on homeassistant_stop.

    Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 for HTTP/2 support (automatically
    enables httpx http2 mode).

    This method must be run in the event loop.
    """
    # Use the requested ALPN protocols directly to ensure proper SSL context
    # bucketing. httpx/httpcore mutates SSL contexts by calling set_alpn_protocols(),
    # so we pre-set the correct protocols to prevent shared context corruption.
    ssl_context = (
        client_context(ssl_cipher_list)
        client_context(ssl_cipher_list, alpn_protocols)
        if verify_ssl
        else create_no_verify_ssl_context(ssl_cipher_list)
        else create_no_verify_ssl_context(ssl_cipher_list, alpn_protocols)
    )
    # Enable httpx HTTP/2 mode when HTTP/2 protocol is requested
    if alpn_protocols == SSL_ALPN_HTTP11_HTTP2:
        kwargs.setdefault("http2", True)
    client = HassHttpXAsyncClient(
        verify=ssl_context,
        headers={USER_AGENT: SERVER_SOFTWARE},
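`get_async_client` now keeps one shared client per `(verify_ssl, alpn_protocols)` pair instead of just one verified and one unverified client. A minimal sketch of that caching scheme, with a plain dict standing in for `hass.data` and the SSL context handling left out:

```python
import httpx

# One shared AsyncClient per (verify_ssl, alpn_protocols) combination.
_CLIENTS: dict[tuple[bool, tuple[str, ...] | None], httpx.AsyncClient] = {}


def get_cached_client(
    verify_ssl: bool = True,
    alpn_protocols: tuple[str, ...] | None = ("http/1.1",),
) -> httpx.AsyncClient:
    """Return a shared AsyncClient, one per (verify_ssl, alpn_protocols) pair."""
    key = (verify_ssl, alpn_protocols)
    if (client := _CLIENTS.get(key)) is None:
        # Requesting h2 turns on httpx's HTTP/2 support for this client only
        # (needs the optional "h2" package installed).
        http2 = alpn_protocols is not None and "h2" in alpn_protocols
        client = _CLIENTS[key] = httpx.AsyncClient(verify=verify_ssl, http2=http2)
    return client
```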
@@ -594,6 +594,8 @@ class EntityNumericalStateAttributeChangedTriggerBase(EntityTriggerBase):
    _above: None | float | str
    _below: None | float | str

    _converter: Callable[[Any], float] = float

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the state trigger."""
        super().__init__(hass, config)

@@ -616,7 +618,7 @@ class EntityNumericalStateAttributeChangedTriggerBase(EntityTriggerBase):
            return False

        try:
            current_value = float(_attribute_value)
            current_value = self._converter(_attribute_value)
        except (TypeError, ValueError):
            # Attribute is not a valid number, don't trigger
            return False

@@ -706,6 +708,8 @@ class EntityNumericalStateAttributeCrossedThresholdTriggerBase(EntityTriggerBase
    _upper_limit: float | str | None = None
    _threshold_type: ThresholdType

    _converter: Callable[[Any], float] = float

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the state trigger."""
        super().__init__(hass, config)

@@ -741,7 +745,7 @@ class EntityNumericalStateAttributeCrossedThresholdTriggerBase(EntityTriggerBase
            return False

        try:
            current_value = float(_attribute_value)
            current_value = self._converter(_attribute_value)
        except (TypeError, ValueError):
            # Attribute is not a valid number, don't trigger
            return False
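The `_converter` hook lets a domain translate raw attribute values into the unit its trigger options use before the above/below comparison runs. Only the base class import and the `staticmethod` pattern below come from this changeset; the humidifier domain, attribute name, and per-mille scale are illustrative assumptions.

```python
from typing import Any

from homeassistant.helpers.trigger import (
    EntityNumericalStateAttributeChangedTriggerBase,
)

ATTR_HUMIDITY = "humidity"  # illustrative attribute name


def _convert_permille_to_percentage(value: Any) -> float:
    """Convert a per-mille reading (0-1000) to a percentage (0-100)."""
    return float(value) / 10.0


class HumidityChangedTrigger(EntityNumericalStateAttributeChangedTriggerBase):
    """Hypothetical trigger comparing humidity thresholds in percent."""

    _domain = "humidifier"
    _attribute = ATTR_HUMIDITY
    _converter = staticmethod(_convert_permille_to_percentage)
```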
@@ -8,6 +8,17 @@ import ssl

import certifi

# Type alias for ALPN protocols tuple (None means no ALPN protocols set)
type SSLALPNProtocols = tuple[str, ...] | None

# ALPN protocol configurations
# No ALPN protocols - used for libraries that don't support/need ALPN (e.g., aioimap)
SSL_ALPN_NONE: SSLALPNProtocols = None
# HTTP/1.1 only - used by default and for aiohttp (which doesn't support HTTP/2)
SSL_ALPN_HTTP11: SSLALPNProtocols = ("http/1.1",)
# HTTP/1.1 with HTTP/2 support - used when httpx http2=True
SSL_ALPN_HTTP11_HTTP2: SSLALPNProtocols = ("http/1.1", "h2")


class SSLCipherList(StrEnum):
    """SSL cipher lists."""

@@ -64,7 +75,10 @@ SSL_CIPHER_LISTS = {


@cache
def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
def _client_context_no_verify(
    ssl_cipher_list: SSLCipherList,
    alpn_protocols: SSLALPNProtocols,
) -> ssl.SSLContext:
    # This is a copy of aiohttp's create_default_context() function, with the
    # ssl verify turned off.
    # https://github.com/aio-libs/aiohttp/blob/33953f110e97eecc707e1402daa8d543f38a189b/aiohttp/connector.py#L911

@@ -78,12 +92,18 @@ def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
    sslcontext.set_default_verify_paths()
    if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
        sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
    # Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
    # from mutating the shared SSL context with different protocol settings.
    # If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
    if alpn_protocols is not None:
        sslcontext.set_alpn_protocols(list(alpn_protocols))

    return sslcontext


def _create_client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an independent SSL context for making requests."""
    # Reuse environment variable definition from requests, since it's already a

@@ -96,6 +116,11 @@ def _create_client_context(
    )
    if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
        sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
    # Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
    # from mutating the shared SSL context with different protocol settings.
    # If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
    if alpn_protocols is not None:
        sslcontext.set_alpn_protocols(list(alpn_protocols))

    return sslcontext

@@ -103,63 +128,63 @@ def _create_client_context(
@cache
def _client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    # Cached version of _create_client_context
    return _create_client_context(ssl_cipher_list)
    return _create_client_context(ssl_cipher_list, alpn_protocols)


# Create this only once and reuse it
_DEFAULT_SSL_CONTEXT = _client_context(SSLCipherList.PYTHON_DEFAULT)
_DEFAULT_NO_VERIFY_SSL_CONTEXT = _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT)
_NO_VERIFY_SSL_CONTEXTS = {
    SSLCipherList.INTERMEDIATE: _client_context_no_verify(SSLCipherList.INTERMEDIATE),
    SSLCipherList.MODERN: _client_context_no_verify(SSLCipherList.MODERN),
    SSLCipherList.INSECURE: _client_context_no_verify(SSLCipherList.INSECURE),
}
_SSL_CONTEXTS = {
    SSLCipherList.INTERMEDIATE: _client_context(SSLCipherList.INTERMEDIATE),
    SSLCipherList.MODERN: _client_context(SSLCipherList.MODERN),
    SSLCipherList.INSECURE: _client_context(SSLCipherList.INSECURE),
}
# Pre-warm the cache for ALL SSL context configurations at module load time.
# This is critical because creating SSL contexts loads certificates from disk,
# which is blocking I/O that must not happen in the event loop.
_SSL_ALPN_PROTOCOLS = (SSL_ALPN_NONE, SSL_ALPN_HTTP11, SSL_ALPN_HTTP11_HTTP2)
for _cipher in SSLCipherList:
    for _alpn in _SSL_ALPN_PROTOCOLS:
        _client_context(_cipher, _alpn)
        _client_context_no_verify(_cipher, _alpn)


def get_default_context() -> ssl.SSLContext:
    """Return the default SSL context."""
    return _DEFAULT_SSL_CONTEXT
    return _client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)


def get_default_no_verify_context() -> ssl.SSLContext:
    """Return the default SSL context that does not verify the server certificate."""
    return _DEFAULT_NO_VERIFY_SSL_CONTEXT
    return _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)


def client_context_no_verify(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return a SSL context with no verification with a specific ssl cipher."""
    return _NO_VERIFY_SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_NO_VERIFY_SSL_CONTEXT)
    return _client_context_no_verify(ssl_cipher_list, alpn_protocols)


def client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an SSL context for making requests."""
    return _SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_SSL_CONTEXT)
    return _client_context(ssl_cipher_list, alpn_protocols)


def create_client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an independent SSL context for making requests."""
    # This explicitly uses the non-cached version to create a client context
    return _create_client_context(ssl_cipher_list)
    return _create_client_context(ssl_cipher_list, alpn_protocols)


def create_no_verify_ssl_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an SSL context that does not verify the server certificate."""
    return _client_context_no_verify(ssl_cipher_list)
    return _client_context_no_verify(ssl_cipher_list, alpn_protocols)


def server_context_modern() -> ssl.SSLContext:
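The ssl utility now caches one context per cipher list and ALPN tuple and pre-warms every combination at import time, since loading certificates is blocking I/O that must stay out of the event loop. Keying the cache on the ALPN tuple buys isolation: callers that want `h2` get their own context object, so a library calling `set_alpn_protocols()` on it cannot disturb the HTTP/1.1-only context handed to aiohttp. A standalone sketch of that idea (cipher handling omitted):

```python
import ssl
from functools import cache


@cache
def client_context(alpn_protocols: tuple[str, ...] | None = None) -> ssl.SSLContext:
    """Return a shared verified client context pinned to the given ALPN list."""
    context = ssl.create_default_context()
    if alpn_protocols is not None:
        context.set_alpn_protocols(list(alpn_protocols))
    return context


# Distinct ALPN tuples yield distinct cached contexts...
assert client_context(("http/1.1",)) is not client_context(("http/1.1", "h2"))
# ...while repeated requests for the same tuple reuse one object.
assert client_context(("http/1.1",)) is client_context(("http/1.1",))
```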
requirements_all.txt (generated, 4 changed lines)
@@ -1646,7 +1646,7 @@ omnilogic==0.4.5
|
||||
ondilo==0.5.0
|
||||
|
||||
# homeassistant.components.onedrive
|
||||
onedrive-personal-sdk==0.1.0
|
||||
onedrive-personal-sdk==0.1.1
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==4.0.4
|
||||
@@ -2147,7 +2147,7 @@ pykmtronic==0.3.0
|
||||
pykodi==0.2.7
|
||||
|
||||
# homeassistant.components.kostal_plenticore
|
||||
pykoplenti==1.3.0
|
||||
pykoplenti==1.5.0
|
||||
|
||||
# homeassistant.components.kraken
|
||||
pykrakenapi==0.1.8
|
||||
|
||||
requirements_test_all.txt (generated, 4 changed lines)
@@ -1429,7 +1429,7 @@ omnilogic==0.4.5
|
||||
ondilo==0.5.0
|
||||
|
||||
# homeassistant.components.onedrive
|
||||
onedrive-personal-sdk==0.1.0
|
||||
onedrive-personal-sdk==0.1.1
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==4.0.4
|
||||
@@ -1821,7 +1821,7 @@ pykmtronic==0.3.0
|
||||
pykodi==0.2.7
|
||||
|
||||
# homeassistant.components.kostal_plenticore
|
||||
pykoplenti==1.3.0
|
||||
pykoplenti==1.5.0
|
||||
|
||||
# homeassistant.components.kraken
|
||||
pykrakenapi==0.1.8
|
||||
|
||||
@@ -2232,7 +2232,7 @@ async def test_extraction_functions(
|
||||
assert automation.blueprint_in_automation(hass, "automation.test3") is None
|
||||
|
||||
|
||||
async def test_extraction_functions_with_targets(
|
||||
async def test_extraction_functions_with_trigger_targets(
|
||||
hass: HomeAssistant,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
@@ -2428,6 +2428,211 @@ async def test_extraction_functions_with_targets(
|
||||
}
|
||||
|
||||
|
||||
async def test_extraction_functions_with_condition_targets(
|
||||
hass: HomeAssistant,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
) -> None:
|
||||
"""Test extraction functions with targets in conditions."""
|
||||
config_entry = MockConfigEntry(domain="fake_integration", data={})
|
||||
config_entry.mock_state(hass, ConfigEntryState.LOADED)
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
condition_device = device_registry.async_get_or_create(
|
||||
config_entry_id=config_entry.entry_id,
|
||||
connections={(dr.CONNECTION_NETWORK_MAC, "00:00:00:00:00:02")},
|
||||
)
|
||||
|
||||
await async_setup_component(hass, "homeassistant", {})
|
||||
await async_setup_component(hass, "light", {"light": {"platform": "demo"}})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Enable the new_triggers_conditions feature flag to allow new-style conditions
|
||||
assert await async_setup_component(hass, "labs", {})
|
||||
ws_client = await hass_ws_client(hass)
|
||||
await ws_client.send_json_auto_id(
|
||||
{
|
||||
"type": "labs/update",
|
||||
"domain": "automation",
|
||||
"preview_feature": "new_triggers_conditions",
|
||||
"enabled": True,
|
||||
}
|
||||
)
|
||||
msg = await ws_client.receive_json()
|
||||
assert msg["success"]
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
DOMAIN,
|
||||
{
|
||||
DOMAIN: [
|
||||
{
|
||||
"alias": "test1",
|
||||
"triggers": [
|
||||
{"trigger": "state", "entity_id": "sensor.trigger_state"},
|
||||
],
|
||||
"conditions": [
|
||||
# Single entity_id in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {"entity_id": "light.condition_entity"},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Multiple entity_ids in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {
|
||||
"entity_id": [
|
||||
"light.condition_entity_list1",
|
||||
"light.condition_entity_list2",
|
||||
]
|
||||
},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Single device_id in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {"device_id": condition_device.id},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Multiple device_ids in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {
|
||||
"device_id": [
|
||||
"target-device-1",
|
||||
"target-device-2",
|
||||
]
|
||||
},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Single area_id in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {"area_id": "area-condition-single"},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Multiple area_ids in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {
|
||||
"area_id": ["area-condition-1", "area-condition-2"]
|
||||
},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Single floor_id in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {"floor_id": "floor-condition-single"},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Multiple floor_ids in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {
|
||||
"floor_id": ["floor-condition-1", "floor-condition-2"]
|
||||
},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Single label_id in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {"label_id": "label-condition-single"},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Multiple label_ids in target
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {
|
||||
"label_id": ["label-condition-1", "label-condition-2"]
|
||||
},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
# Combined targets
|
||||
{
|
||||
"condition": "light.is_on",
|
||||
"target": {
|
||||
"entity_id": "light.combined_entity",
|
||||
"device_id": "combined-device",
|
||||
"area_id": "combined-area",
|
||||
"floor_id": "combined-floor",
|
||||
"label_id": "combined-label",
|
||||
},
|
||||
"options": {"behavior": "any"},
|
||||
},
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"action": "test.script",
|
||||
"data": {"entity_id": "light.action_entity"},
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
# Test entity extraction from condition targets
|
||||
assert set(automation.entities_in_automation(hass, "automation.test1")) == {
|
||||
"sensor.trigger_state",
|
||||
"light.condition_entity",
|
||||
"light.condition_entity_list1",
|
||||
"light.condition_entity_list2",
|
||||
"light.combined_entity",
|
||||
"light.action_entity",
|
||||
}
|
||||
|
||||
# Test device extraction from condition targets
|
||||
assert set(automation.devices_in_automation(hass, "automation.test1")) == {
|
||||
condition_device.id,
|
||||
"target-device-1",
|
||||
"target-device-2",
|
||||
"combined-device",
|
||||
}
|
||||
|
||||
# Test area extraction from condition targets
|
||||
assert set(automation.areas_in_automation(hass, "automation.test1")) == {
|
||||
"area-condition-single",
|
||||
"area-condition-1",
|
||||
"area-condition-2",
|
||||
"combined-area",
|
||||
}
|
||||
|
||||
# Test floor extraction from condition targets
|
||||
assert set(automation.floors_in_automation(hass, "automation.test1")) == {
|
||||
"floor-condition-single",
|
||||
"floor-condition-1",
|
||||
"floor-condition-2",
|
||||
"combined-floor",
|
||||
}
|
||||
|
||||
# Test label extraction from condition targets
|
||||
assert set(automation.labels_in_automation(hass, "automation.test1")) == {
|
||||
"label-condition-single",
|
||||
"label-condition-1",
|
||||
"label-condition-2",
|
||||
"combined-label",
|
||||
}
|
||||
|
||||
# Test automations_with_* functions
|
||||
assert set(automation.automations_with_entity(hass, "light.condition_entity")) == {
|
||||
"automation.test1"
|
||||
}
|
||||
assert set(automation.automations_with_device(hass, condition_device.id)) == {
|
||||
"automation.test1"
|
||||
}
|
||||
assert set(automation.automations_with_area(hass, "area-condition-single")) == {
|
||||
"automation.test1"
|
||||
}
|
||||
assert set(automation.automations_with_floor(hass, "floor-condition-single")) == {
|
||||
"automation.test1"
|
||||
}
|
||||
assert set(automation.automations_with_label(hass, "label-condition-single")) == {
|
||||
"automation.test1"
|
||||
}
|
||||
|
||||
|
||||
async def test_logbook_humanify_automation_triggered_event(hass: HomeAssistant) -> None:
|
||||
"""Test humanifying Automation Trigger event."""
|
||||
hass.config.components.add("recorder")
|
||||
|
||||
@@ -5,14 +5,25 @@ from typing import Any
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.light import ATTR_BRIGHTNESS
|
||||
from homeassistant.const import ATTR_LABEL_ID, CONF_ENTITY_ID, STATE_OFF, STATE_ON
|
||||
from homeassistant.const import (
|
||||
ATTR_LABEL_ID,
|
||||
CONF_ABOVE,
|
||||
CONF_BELOW,
|
||||
CONF_ENTITY_ID,
|
||||
STATE_OFF,
|
||||
STATE_ON,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.helpers.trigger import (
|
||||
CONF_LOWER_LIMIT,
|
||||
CONF_THRESHOLD_TYPE,
|
||||
CONF_UPPER_LIMIT,
|
||||
ThresholdType,
|
||||
)
|
||||
|
||||
from tests.components import (
|
||||
TriggerStateDescription,
|
||||
arm_trigger,
|
||||
parametrize_numerical_attribute_changed_trigger_states,
|
||||
parametrize_numerical_attribute_crossed_threshold_trigger_states,
|
||||
parametrize_target_entities,
|
||||
parametrize_trigger_states,
|
||||
set_or_remove_state,
|
||||
@@ -26,6 +37,131 @@ async def target_lights(hass: HomeAssistant) -> list[str]:
|
||||
return (await target_entities(hass, "light"))["included"]
|
||||
|
||||
|
||||
def parametrize_brightness_changed_trigger_states(
|
||||
trigger: str, state: str, attribute: str
|
||||
) -> list[tuple[str, dict[str, Any], list[TriggerStateDescription]]]:
|
||||
"""Parametrize states and expected service call counts for brightness changed triggers.
|
||||
|
||||
Note: The brightness in the trigger configuration is in percentage (0-100) scale,
|
||||
the underlying attribute in the state is in uint8 (0-255) scale.
|
||||
"""
|
||||
return [
|
||||
*parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={},
|
||||
target_states=[
|
||||
(state, {attribute: 0}),
|
||||
(state, {attribute: 128}),
|
||||
(state, {attribute: 255}),
|
||||
],
|
||||
other_states=[(state, {attribute: None})],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={CONF_ABOVE: 10},
|
||||
target_states=[
|
||||
(state, {attribute: 128}),
|
||||
(state, {attribute: 255}),
|
||||
],
|
||||
other_states=[
|
||||
(state, {attribute: None}),
|
||||
(state, {attribute: 0}),
|
||||
],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={CONF_BELOW: 90},
|
||||
target_states=[
|
||||
(state, {attribute: 0}),
|
||||
(state, {attribute: 128}),
|
||||
],
|
||||
other_states=[
|
||||
(state, {attribute: None}),
|
||||
(state, {attribute: 255}),
|
||||
],
|
||||
retrigger_on_target_state=True,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def parametrize_brightness_crossed_threshold_trigger_states(
|
||||
trigger: str, state: str, attribute: str
|
||||
) -> list[tuple[str, dict[str, Any], list[TriggerStateDescription]]]:
|
||||
"""Parametrize states and expected service call counts for brightness crossed threshold triggers.
|
||||
|
||||
Note: The brightness in the trigger configuration is in percentage (0-100) scale,
|
||||
the underlying attribute in the state is in uint8 (0-255) scale.
|
||||
"""
|
||||
return [
|
||||
*parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.BETWEEN,
|
||||
CONF_LOWER_LIMIT: 10,
|
||||
CONF_UPPER_LIMIT: 90,
|
||||
},
|
||||
target_states=[
|
||||
(state, {attribute: 128}),
|
||||
(state, {attribute: 153}),
|
||||
],
|
||||
other_states=[
|
||||
(state, {attribute: None}),
|
||||
(state, {attribute: 0}),
|
||||
(state, {attribute: 255}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.OUTSIDE,
|
||||
CONF_LOWER_LIMIT: 10,
|
||||
CONF_UPPER_LIMIT: 90,
|
||||
},
|
||||
target_states=[
|
||||
(state, {attribute: 0}),
|
||||
(state, {attribute: 255}),
|
||||
],
|
||||
other_states=[
|
||||
(state, {attribute: None}),
|
||||
(state, {attribute: 128}),
|
||||
(state, {attribute: 153}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.ABOVE,
|
||||
CONF_LOWER_LIMIT: 10,
|
||||
},
|
||||
target_states=[
|
||||
(state, {attribute: 128}),
|
||||
(state, {attribute: 255}),
|
||||
],
|
||||
other_states=[
|
||||
(state, {attribute: None}),
|
||||
(state, {attribute: 0}),
|
||||
],
|
||||
),
|
||||
*parametrize_trigger_states(
|
||||
trigger=trigger,
|
||||
trigger_options={
|
||||
CONF_THRESHOLD_TYPE: ThresholdType.BELOW,
|
||||
CONF_UPPER_LIMIT: 90,
|
||||
},
|
||||
target_states=[
|
||||
(state, {attribute: 0}),
|
||||
(state, {attribute: 128}),
|
||||
],
|
||||
other_states=[
|
||||
(state, {attribute: None}),
|
||||
(state, {attribute: 255}),
|
||||
],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"trigger_key",
|
||||
[
|
||||
@@ -114,10 +250,10 @@ async def test_light_state_trigger_behavior_any(
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
[
|
||||
*parametrize_numerical_attribute_changed_trigger_states(
|
||||
*parametrize_brightness_changed_trigger_states(
|
||||
"light.brightness_changed", STATE_ON, ATTR_BRIGHTNESS
|
||||
),
|
||||
*parametrize_numerical_attribute_crossed_threshold_trigger_states(
|
||||
*parametrize_brightness_crossed_threshold_trigger_states(
|
||||
"light.brightness_crossed_threshold", STATE_ON, ATTR_BRIGHTNESS
|
||||
),
|
||||
],
|
||||
@@ -225,7 +361,7 @@ async def test_light_state_trigger_behavior_first(
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
[
|
||||
*parametrize_numerical_attribute_crossed_threshold_trigger_states(
|
||||
*parametrize_brightness_crossed_threshold_trigger_states(
|
||||
"light.brightness_crossed_threshold", STATE_ON, ATTR_BRIGHTNESS
|
||||
),
|
||||
],
|
||||
@@ -333,7 +469,7 @@ async def test_light_state_trigger_behavior_last(
|
||||
@pytest.mark.parametrize(
|
||||
("trigger", "trigger_options", "states"),
|
||||
[
|
||||
*parametrize_numerical_attribute_crossed_threshold_trigger_states(
|
||||
*parametrize_brightness_crossed_threshold_trigger_states(
|
||||
"light.brightness_crossed_threshold", STATE_ON, ATTR_BRIGHTNESS
|
||||
),
|
||||
],
|
||||
|
||||
@@ -19,11 +19,6 @@ from tests.components import (
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True, name="stub_blueprint_populate")
|
||||
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
|
||||
"""Stub copying the blueprints to the config folder."""
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def target_sirens(hass: HomeAssistant) -> list[str]:
|
||||
"""Create multiple siren entities associated with different targets."""
|
||||
|
||||
@@ -1,179 +0,0 @@
|
||||
"""Test switch conditions."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.components import (
|
||||
ConditionStateDescription,
|
||||
assert_condition_gated_by_labs_flag,
|
||||
create_target_condition,
|
||||
parametrize_condition_states_all,
|
||||
parametrize_condition_states_any,
|
||||
parametrize_target_entities,
|
||||
set_or_remove_state,
|
||||
target_entities,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True, name="stub_blueprint_populate")
|
||||
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
|
||||
"""Stub copying the blueprints to the config folder."""
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def target_lights(hass: HomeAssistant) -> list[str]:
|
||||
"""Create multiple light entities associated with different targets."""
|
||||
return (await target_entities(hass, "light"))["included"]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def target_switches(hass: HomeAssistant) -> list[str]:
|
||||
"""Create multiple switch entities associated with different targets."""
|
||||
return (await target_entities(hass, "switch"))["included"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"condition",
|
||||
[
|
||||
"switch.is_off",
|
||||
"switch.is_on",
|
||||
],
|
||||
)
|
||||
async def test_switch_conditions_gated_by_labs_flag(
|
||||
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, condition: str
|
||||
) -> None:
|
||||
"""Test the switch conditions are gated by the labs flag."""
|
||||
await assert_condition_gated_by_labs_flag(hass, caplog, condition)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
    ("condition_target_config", "entity_id", "entities_in_target"),
    parametrize_target_entities("switch"),
)
@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [
        *parametrize_condition_states_any(
            condition="switch.is_on",
            target_states=[STATE_ON],
            other_states=[STATE_OFF],
        ),
        *parametrize_condition_states_any(
            condition="switch.is_off",
            target_states=[STATE_OFF],
            other_states=[STATE_ON],
        ),
    ],
)
async def test_switch_state_condition_behavior_any(
    hass: HomeAssistant,
    target_lights: list[str],
    target_switches: list[str],
    condition_target_config: dict,
    entity_id: str,
    entities_in_target: int,
    condition: str,
    condition_options: dict[str, Any],
    states: list[ConditionStateDescription],
) -> None:
    """Test the switch state condition with the 'any' behavior."""
    other_entity_ids = set(target_switches) - {entity_id}

    # Set all switches, including the tested switch, to the initial state
    for eid in target_switches:
        set_or_remove_state(hass, eid, states[0]["included"])
    await hass.async_block_till_done()

    condition = await create_target_condition(
        hass,
        condition=condition,
        target=condition_target_config,
        behavior="any",
    )

    # Set state for lights to ensure that they don't impact the condition
    for state in states:
        for eid in target_lights:
            set_or_remove_state(hass, eid, state["included"])
        await hass.async_block_till_done()
        assert condition(hass) is False

    for state in states:
        included_state = state["included"]
        set_or_remove_state(hass, entity_id, included_state)
        await hass.async_block_till_done()
        assert condition(hass) == state["condition_true"]

        # Check if changing the other switches also passes the condition
        for other_entity_id in other_entity_ids:
            set_or_remove_state(hass, other_entity_id, included_state)
            await hass.async_block_till_done()
            assert condition(hass) == state["condition_true"]

@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
    ("condition_target_config", "entity_id", "entities_in_target"),
    parametrize_target_entities("switch"),
)
@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [
        *parametrize_condition_states_all(
            condition="switch.is_on",
            target_states=[STATE_ON],
            other_states=[STATE_OFF],
        ),
        *parametrize_condition_states_all(
            condition="switch.is_off",
            target_states=[STATE_OFF],
            other_states=[STATE_ON],
        ),
    ],
)
async def test_switch_state_condition_behavior_all(
    hass: HomeAssistant,
    target_switches: list[str],
    condition_target_config: dict,
    entity_id: str,
    entities_in_target: int,
    condition: str,
    condition_options: dict[str, Any],
    states: list[ConditionStateDescription],
) -> None:
    """Test the switch state condition with the 'all' behavior."""
    # Set state for two switches to ensure that they don't impact the condition
    hass.states.async_set("switch.label_switch_1", STATE_OFF)
    hass.states.async_set("switch.label_switch_2", STATE_ON)

    other_entity_ids = set(target_switches) - {entity_id}

    # Set all switches, including the tested switch, to the initial state
    for eid in target_switches:
        set_or_remove_state(hass, eid, states[0]["included"])
    await hass.async_block_till_done()

    condition = await create_target_condition(
        hass,
        condition=condition,
        target=condition_target_config,
        behavior="all",
    )

    for state in states:
        included_state = state["included"]

        set_or_remove_state(hass, entity_id, included_state)
        await hass.async_block_till_done()
        assert condition(hass) == state["condition_true_first_entity"]

        for other_entity_id in other_entity_ids:
            set_or_remove_state(hass, other_entity_id, included_state)
        await hass.async_block_till_done()

        assert condition(hass) == state["condition_true"]
@@ -22,6 +22,7 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client as client
from homeassistant.util import ssl as ssl_util
from homeassistant.util.color import RGBColor
from homeassistant.util.ssl import SSLCipherList

@@ -413,3 +414,29 @@ async def test_resolver_is_singleton(hass: HomeAssistant) -> None:
    assert isinstance(session3._connector, aiohttp.TCPConnector)
    assert session._connector._resolver is session2._connector._resolver
    assert session._connector._resolver is session3._connector._resolver


async def test_connector_uses_http11_alpn(hass: HomeAssistant) -> None:
    """Test that connector uses HTTP/1.1 ALPN protocols."""
    with patch.object(
        ssl_util, "client_context", wraps=ssl_util.client_context
    ) as mock_client_context:
        client.async_get_clientsession(hass)

    # Verify client_context was called with HTTP/1.1 ALPN
    mock_client_context.assert_called_once_with(
        SSLCipherList.PYTHON_DEFAULT, ssl_util.SSL_ALPN_HTTP11
    )


async def test_connector_no_verify_uses_http11_alpn(hass: HomeAssistant) -> None:
    """Test that connector without SSL verification uses HTTP/1.1 ALPN protocols."""
    with patch.object(
        ssl_util, "client_context_no_verify", wraps=ssl_util.client_context_no_verify
    ) as mock_client_context_no_verify:
        client.async_get_clientsession(hass, verify_ssl=False)

    # Verify client_context_no_verify was called with HTTP/1.1 ALPN
    mock_client_context_no_verify.assert_called_once_with(
        SSLCipherList.PYTHON_DEFAULT, ssl_util.SSL_ALPN_HTTP11
    )

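The aiohttp tests above only assert which arguments the shared session passes to ssl_util.client_context. A minimal sketch of the same call outside the test suite, assuming nothing beyond the signature and caching behavior exercised in this diff (cipher list plus ALPN tuple, identical arguments returning the cached context):

# Sketch only: obtain the HTTP/1.1-bucketed client SSLContext directly,
# using the same arguments the aiohttp helper is expected to pass.
from homeassistant.util import ssl as ssl_util
from homeassistant.util.ssl import SSL_ALPN_HTTP11, SSLCipherList

context = ssl_util.client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)

# Repeated calls with the same cipher list and ALPN tuple are expected to
# return the cached SSLContext instance, as the ssl tests later in this diff assert.
assert context is ssl_util.client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)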
@@ -8,6 +8,7 @@ import pytest
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE
from homeassistant.core import HomeAssistant
from homeassistant.helpers import httpx_client as client
from homeassistant.util.ssl import SSL_ALPN_HTTP11, SSL_ALPN_HTTP11_HTTP2

from tests.common import MockModule, extract_stack_to_frame, mock_integration

@@ -16,14 +17,20 @@ async def test_get_async_client_with_ssl(hass: HomeAssistant) -> None:
    """Test init async client with ssl."""
    client.get_async_client(hass)

    assert isinstance(hass.data[client.DATA_ASYNC_CLIENT], httpx.AsyncClient)
    assert isinstance(
        hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
        httpx.AsyncClient,
    )


async def test_get_async_client_without_ssl(hass: HomeAssistant) -> None:
    """Test init async client without ssl."""
    client.get_async_client(hass, verify_ssl=False)

    assert isinstance(hass.data[client.DATA_ASYNC_CLIENT_NOVERIFY], httpx.AsyncClient)
    assert isinstance(
        hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11)],
        httpx.AsyncClient,
    )


async def test_create_async_httpx_client_with_ssl_and_cookies(
@@ -34,7 +41,7 @@ async def test_create_async_httpx_client_with_ssl_and_cookies(

    httpx_client = client.create_async_httpx_client(hass, cookies={"bla": True})
    assert isinstance(httpx_client, httpx.AsyncClient)
    assert hass.data[client.DATA_ASYNC_CLIENT] != httpx_client
    assert hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)] != httpx_client


async def test_create_async_httpx_client_without_ssl_and_cookies(
@@ -47,31 +54,37 @@ async def test_create_async_httpx_client_without_ssl_and_cookies(
        hass, verify_ssl=False, cookies={"bla": True}
    )
    assert isinstance(httpx_client, httpx.AsyncClient)
    assert hass.data[client.DATA_ASYNC_CLIENT_NOVERIFY] != httpx_client
    assert hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11)] != httpx_client


async def test_get_async_client_cleanup(hass: HomeAssistant) -> None:
    """Test init async client with ssl."""
    client.get_async_client(hass)

    assert isinstance(hass.data[client.DATA_ASYNC_CLIENT], httpx.AsyncClient)
    assert isinstance(
        hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
        httpx.AsyncClient,
    )

    hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE)
    await hass.async_block_till_done()

    assert hass.data[client.DATA_ASYNC_CLIENT].is_closed
    assert hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)].is_closed


async def test_get_async_client_cleanup_without_ssl(hass: HomeAssistant) -> None:
    """Test init async client without ssl."""
    client.get_async_client(hass, verify_ssl=False)

    assert isinstance(hass.data[client.DATA_ASYNC_CLIENT_NOVERIFY], httpx.AsyncClient)
    assert isinstance(
        hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11)],
        httpx.AsyncClient,
    )

    hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE)
    await hass.async_block_till_done()

    assert hass.data[client.DATA_ASYNC_CLIENT_NOVERIFY].is_closed
    assert hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11)].is_closed


async def test_get_async_client_patched_close(hass: HomeAssistant) -> None:
@@ -79,7 +92,10 @@ async def test_get_async_client_patched_close(hass: HomeAssistant) -> None:

    with patch("httpx.AsyncClient.aclose") as mock_aclose:
        httpx_session = client.get_async_client(hass)
        assert isinstance(hass.data[client.DATA_ASYNC_CLIENT], httpx.AsyncClient)
        assert isinstance(
            hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
            httpx.AsyncClient,
        )

        with pytest.raises(RuntimeError):
            await httpx_session.aclose()
@@ -92,7 +108,10 @@ async def test_get_async_client_context_manager(hass: HomeAssistant) -> None:

    with patch("httpx.AsyncClient.aclose") as mock_aclose:
        httpx_session = client.get_async_client(hass)
        assert isinstance(hass.data[client.DATA_ASYNC_CLIENT], httpx.AsyncClient)
        assert isinstance(
            hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
            httpx.AsyncClient,
        )

        async with httpx_session:
            pass
@@ -100,6 +119,80 @@ async def test_get_async_client_context_manager(hass: HomeAssistant) -> None:
        assert mock_aclose.call_count == 0

async def test_get_async_client_http2(hass: HomeAssistant) -> None:
    """Test init async client with HTTP/2 support."""
    http1_client = client.get_async_client(hass)
    http2_client = client.get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)

    # HTTP/1.1 and HTTP/2 clients should be different (different SSL contexts)
    assert http1_client is not http2_client
    assert isinstance(
        hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11)],
        httpx.AsyncClient,
    )
    assert isinstance(
        hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11_HTTP2)],
        httpx.AsyncClient,
    )

    # Same parameters should return cached client
    assert client.get_async_client(hass) is http1_client
    assert (
        client.get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
        is http2_client
    )


async def test_get_async_client_http2_cleanup(hass: HomeAssistant) -> None:
    """Test cleanup of HTTP/2 async client."""
    client.get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)

    assert isinstance(
        hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11_HTTP2)],
        httpx.AsyncClient,
    )

    hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE)
    await hass.async_block_till_done()

    assert hass.data[client.DATA_ASYNC_CLIENT][(True, SSL_ALPN_HTTP11_HTTP2)].is_closed


async def test_get_async_client_http2_without_ssl(hass: HomeAssistant) -> None:
    """Test init async client with HTTP/2 and without SSL."""
    http2_client = client.get_async_client(
        hass, verify_ssl=False, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
    )

    assert isinstance(
        hass.data[client.DATA_ASYNC_CLIENT][(False, SSL_ALPN_HTTP11_HTTP2)],
        httpx.AsyncClient,
    )

    # Same parameters should return cached client
    assert (
        client.get_async_client(
            hass, verify_ssl=False, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
        )
        is http2_client
    )


async def test_create_async_httpx_client_http2(hass: HomeAssistant) -> None:
    """Test create async client with HTTP/2 uses correct ALPN protocols."""
    http1_client = client.create_async_httpx_client(hass)
    http2_client = client.create_async_httpx_client(
        hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
    )

    # Different clients (not cached)
    assert http1_client is not http2_client

    # Both should be valid clients
    assert isinstance(http1_client, httpx.AsyncClient)
    assert isinstance(http2_client, httpx.AsyncClient)


async def test_warning_close_session_integration(
    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:

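As a usage note (not part of this diff): an integration that wants HTTP/2 would request the shared client with the same keyword the tests above exercise. The module path, function name, and constant come from the test imports; the coroutine itself is a hypothetical sketch.

from homeassistant.core import HomeAssistant
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2


async def async_fetch_status(hass: HomeAssistant, url: str) -> int:
    """Hypothetical helper: fetch a URL with a shared client that may negotiate HTTP/2."""
    httpx_client = get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
    response = await httpx_client.get(url)
    return response.status_code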
@@ -188,12 +188,10 @@ async def test_caching(hass: HomeAssistant) -> None:
        side_effect=icon.build_resources,
    ) as mock_build:
        load1 = await icon.async_get_icons(hass, "entity_component")
        # conditions, entity_component, services, triggers
        assert len(mock_build.mock_calls) == 4
        assert len(mock_build.mock_calls) == 3  # entity_component, services, triggers

        load2 = await icon.async_get_icons(hass, "entity_component")
        # conditions, entity_component, services, triggers
        assert len(mock_build.mock_calls) == 4
        assert len(mock_build.mock_calls) == 3  # entity_component, services, triggers

    assert load1 == load2

@@ -1,78 +1,58 @@
"""Test Home Assistant ssl utility functions."""

from unittest.mock import MagicMock, Mock, patch

import pytest

from homeassistant.util.ssl import (
    SSL_ALPN_HTTP11,
    SSL_ALPN_HTTP11_HTTP2,
    SSL_ALPN_NONE,
    SSLCipherList,
    client_context,
    client_context_no_verify,
    create_client_context,
    create_no_verify_ssl_context,
    get_default_context,
    get_default_no_verify_context,
)


@pytest.fixture
def mock_sslcontext():
    """Mock the ssl lib."""
    return MagicMock(set_ciphers=Mock(return_value=True))


def test_client_context(mock_sslcontext) -> None:
    """Test client context."""
    with patch("homeassistant.util.ssl.ssl.SSLContext", return_value=mock_sslcontext):
        client_context()
        mock_sslcontext.set_ciphers.assert_not_called()

        client_context(SSLCipherList.MODERN)
        mock_sslcontext.set_ciphers.assert_not_called()

        client_context(SSLCipherList.INTERMEDIATE)
        mock_sslcontext.set_ciphers.assert_not_called()

        client_context(SSLCipherList.INSECURE)
        mock_sslcontext.set_ciphers.assert_not_called()


def test_no_verify_ssl_context(mock_sslcontext) -> None:
    """Test no verify ssl context."""
    with patch("homeassistant.util.ssl.ssl.SSLContext", return_value=mock_sslcontext):
        create_no_verify_ssl_context()
        mock_sslcontext.set_ciphers.assert_not_called()

        create_no_verify_ssl_context(SSLCipherList.MODERN)
        mock_sslcontext.set_ciphers.assert_not_called()

        create_no_verify_ssl_context(SSLCipherList.INTERMEDIATE)
        mock_sslcontext.set_ciphers.assert_not_called()

        create_no_verify_ssl_context(SSLCipherList.INSECURE)
        mock_sslcontext.set_ciphers.assert_not_called()


def test_ssl_context_caching() -> None:
    """Test that SSLContext instances are cached correctly."""

    assert client_context() is client_context(SSLCipherList.PYTHON_DEFAULT)
    assert create_no_verify_ssl_context() is create_no_verify_ssl_context(
        SSLCipherList.PYTHON_DEFAULT
    )

def test_create_client_context(mock_sslcontext) -> None:
    """Test create client context."""
    with patch("homeassistant.util.ssl.ssl.SSLContext", return_value=mock_sslcontext):
        client_context()
        mock_sslcontext.set_ciphers.assert_not_called()
def test_ssl_context_cipher_bucketing() -> None:
    """Test that SSL contexts are bucketed by cipher list."""
    default_ctx = client_context(SSLCipherList.PYTHON_DEFAULT)
    modern_ctx = client_context(SSLCipherList.MODERN)
    intermediate_ctx = client_context(SSLCipherList.INTERMEDIATE)
    insecure_ctx = client_context(SSLCipherList.INSECURE)

        client_context(SSLCipherList.MODERN)
        mock_sslcontext.set_ciphers.assert_not_called()
    # Different cipher lists should return different contexts
    assert default_ctx is not modern_ctx
    assert default_ctx is not intermediate_ctx
    assert default_ctx is not insecure_ctx
    assert modern_ctx is not intermediate_ctx
    assert modern_ctx is not insecure_ctx
    assert intermediate_ctx is not insecure_ctx

        client_context(SSLCipherList.INTERMEDIATE)
        mock_sslcontext.set_ciphers.assert_not_called()
    # Same parameters should return cached context
    assert client_context(SSLCipherList.PYTHON_DEFAULT) is default_ctx
    assert client_context(SSLCipherList.MODERN) is modern_ctx

        client_context(SSLCipherList.INSECURE)
        mock_sslcontext.set_ciphers.assert_not_called()

def test_no_verify_ssl_context_cipher_bucketing() -> None:
    """Test that no-verify SSL contexts are bucketed by cipher list."""
    default_ctx = create_no_verify_ssl_context(SSLCipherList.PYTHON_DEFAULT)
    modern_ctx = create_no_verify_ssl_context(SSLCipherList.MODERN)

    # Different cipher lists should return different contexts
    assert default_ctx is not modern_ctx

    # Same parameters should return cached context
    assert create_no_verify_ssl_context(SSLCipherList.PYTHON_DEFAULT) is default_ctx
    assert create_no_verify_ssl_context(SSLCipherList.MODERN) is modern_ctx


def test_create_client_context_independent() -> None:
@@ -82,3 +62,119 @@ def test_create_client_context_independent() -> None:
    independent_context_2 = create_client_context()
    assert shared_context is not independent_context_1
    assert independent_context_1 is not independent_context_2

def test_ssl_context_alpn_bucketing() -> None:
    """Test that SSL contexts are bucketed by ALPN protocols.

    Different ALPN protocol configurations should return different cached contexts
    to prevent downstream libraries (e.g., httpx/httpcore) from mutating shared
    contexts with incompatible settings.
    """
    # HTTP/1.1, HTTP/2, and no-ALPN contexts should all be different
    http1_context = client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)
    http2_context = client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11_HTTP2)
    no_alpn_context = client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_NONE)
    assert http1_context is not http2_context
    assert http1_context is not no_alpn_context
    assert http2_context is not no_alpn_context

    # Same parameters should return cached context
    assert (
        client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11) is http1_context
    )
    assert (
        client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11_HTTP2)
        is http2_context
    )
    assert (
        client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_NONE) is no_alpn_context
    )

    # No-verify contexts should also be bucketed by ALPN
    http1_no_verify = client_context_no_verify(
        SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11
    )
    http2_no_verify = client_context_no_verify(
        SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11_HTTP2
    )
    no_alpn_no_verify = client_context_no_verify(
        SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_NONE
    )
    assert http1_no_verify is not http2_no_verify
    assert http1_no_verify is not no_alpn_no_verify
    assert http2_no_verify is not no_alpn_no_verify

    # create_no_verify_ssl_context should also work with ALPN
    assert (
        create_no_verify_ssl_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)
        is http1_no_verify
    )
    assert (
        create_no_verify_ssl_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_NONE)
        is no_alpn_no_verify
    )


def test_ssl_context_insecure_alpn_bucketing() -> None:
    """Test that INSECURE cipher list SSL contexts are bucketed by ALPN protocols.

    INSECURE cipher list is used by some integrations that need to connect to
    devices with outdated TLS implementations.
    """
    # HTTP/1.1, HTTP/2, and no-ALPN contexts should all be different
    http1_context = client_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11)
    http2_context = client_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11_HTTP2)
    no_alpn_context = client_context(SSLCipherList.INSECURE, SSL_ALPN_NONE)
    assert http1_context is not http2_context
    assert http1_context is not no_alpn_context
    assert http2_context is not no_alpn_context

    # Same parameters should return cached context
    assert client_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11) is http1_context
    assert (
        client_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11_HTTP2) is http2_context
    )
    assert client_context(SSLCipherList.INSECURE, SSL_ALPN_NONE) is no_alpn_context

    # No-verify contexts should also be bucketed by ALPN
    http1_no_verify = client_context_no_verify(SSLCipherList.INSECURE, SSL_ALPN_HTTP11)
    http2_no_verify = client_context_no_verify(
        SSLCipherList.INSECURE, SSL_ALPN_HTTP11_HTTP2
    )
    no_alpn_no_verify = client_context_no_verify(SSLCipherList.INSECURE, SSL_ALPN_NONE)
    assert http1_no_verify is not http2_no_verify
    assert http1_no_verify is not no_alpn_no_verify
    assert http2_no_verify is not no_alpn_no_verify

    # create_no_verify_ssl_context should also work with ALPN
    assert (
        create_no_verify_ssl_context(SSLCipherList.INSECURE, SSL_ALPN_HTTP11)
        is http1_no_verify
    )
    assert (
        create_no_verify_ssl_context(SSLCipherList.INSECURE, SSL_ALPN_NONE)
        is no_alpn_no_verify
    )


def test_get_default_context_uses_http1_alpn() -> None:
    """Test that get_default_context returns context with HTTP1 ALPN."""
    default_ctx = get_default_context()
    default_no_verify_ctx = get_default_no_verify_context()

    # Default contexts should be the same as explicitly requesting HTTP1 ALPN
    assert default_ctx is client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)
    assert default_no_verify_ctx is client_context_no_verify(
        SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11
    )


def test_client_context_default_no_alpn() -> None:
    """Test that client_context defaults to no ALPN for backward compatibility."""
    # Default (no ALPN) should be different from HTTP1 ALPN
    default_ctx = client_context()
    http1_ctx = client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)

    assert default_ctx is not http1_ctx
    assert default_ctx is client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_NONE)
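A hypothetical sketch of the bucketing behavior these tests pin down: one cached SSLContext per (cipher list, ALPN tuple) key. This is not the homeassistant.util.ssl implementation, only an illustration of the caching shape using the standard library, with the helper name and cipher handling invented for the example.

import ssl
from functools import lru_cache


@lru_cache
def _cached_client_context(
    cipher_list: str, alpn_protocols: tuple[str, ...]
) -> ssl.SSLContext:
    """Return one SSLContext per (cipher list, ALPN) combination."""
    context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH)
    if alpn_protocols:
        context.set_alpn_protocols(list(alpn_protocols))
    # Cipher configuration is omitted here; only the cache keying is shown.
    return context


# Identical arguments hit the cache; a different ALPN tuple gets its own context,
# mirroring the identity asserts in the tests above.
assert _cached_client_context("default", ("http/1.1",)) is _cached_client_context(
    "default", ("http/1.1",)
)
assert _cached_client_context("default", ("http/1.1",)) is not _cached_client_context(
    "default", ()
)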