Address review comments

Erik
2025-06-19 10:50:15 +02:00
parent fe05175adc
commit 2f14f16637
4 changed files with 25 additions and 28 deletions

View File

@@ -561,8 +561,8 @@ async def handle_subscribe_trigger_platforms(
         hass, on_new_triggers
     )
     connection.send_result(msg["id"])
-    event = await _async_get_all_trigger_descriptions_json(hass)
-    connection.send_message(construct_event_message(msg["id"], event))
+    triggers_json = await _async_get_all_trigger_descriptions_json(hass)
+    connection.send_message(construct_event_message(msg["id"], triggers_json))
 
 
 @callback
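The hunk above keeps the usual subscribe-command flow: register the listener, acknowledge with send_result, then immediately push the current state as an event so the subscriber starts from a complete snapshot. A minimal sketch of that pattern; fetch_snapshot and the subscription helper's signature are assumptions for illustration, only send_result, send_message, and construct_event_message come from the diff:

    async def handle_subscribe(hass, connection, msg) -> None:
        # Register first so nothing published between registration and the
        # snapshot is lost, then acknowledge the subscription.
        unsub = async_subscribe_platform_events(hass, on_new_triggers)  # assumed signature
        connection.subscriptions[msg["id"]] = unsub
        connection.send_result(msg["id"])
        # Push the current state so the client starts from a full snapshot.
        snapshot = await fetch_snapshot(hass)  # stand-in for the real helper
        connection.send_message(construct_event_message(msg["id"], snapshot))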

View File

@@ -63,15 +63,14 @@ DATA_PLUGGABLE_ACTIONS: HassKey[defaultdict[tuple, PluggableActionsEntry]] = Has
 TRIGGER_DESCRIPTION_CACHE: HassKey[dict[str, dict[str, Any]]] = HassKey(
     "trigger_description_cache"
 )
-ALL_TRIGGER_DESCRIPTIONS_CACHE: HassKey[dict[str, dict[str, Any]]] = HassKey(
-    "all_trigger_descriptions_cache"
-)
 TRIGGER_PLATFORM_SUBSCRIPTIONS: HassKey[
     list[Callable[[set[str]], Coroutine[Any, Any, None]]]
 ] = HassKey("trigger_platform_subscriptions")
 TRIGGERS: HassKey[dict[str, str]] = HassKey("triggers")
 
+# Basic schemas to sanity check the trigger descriptions,
+# full validation is done by hassfest.services
 _FIELD_SCHEMA = vol.Schema(
     {},
     extra=vol.ALLOW_EXTRA,
 )
@@ -148,7 +147,7 @@ async def _register_trigger_platform(
         )
         return
 
-    tasks: list[asyncio.Task] = [
+    tasks: list[asyncio.Task[None]] = [
         create_eager_task(listener(new_triggers))
         for listener in hass.data[TRIGGER_PLATFORM_SUBSCRIPTIONS]
     ]
@@ -559,12 +558,10 @@ async def async_get_all_descriptions(
     # See if there are new triggers not seen before.
     # Any trigger that we saw before already has an entry in description_cache.
     all_triggers = set(triggers)
-    # If we have a complete cache, check if it is still valid
-    if all_cache := hass.data.get(ALL_TRIGGER_DESCRIPTIONS_CACHE):
-        previous_all_triggers = set(all_cache)
-        # If the triggers are the same, we can return the cache
-        if previous_all_triggers == all_triggers:
-            return all_cache
+    previous_all_triggers = set(descriptions_cache)
+    # If the triggers are the same, we can return the cache
+    if previous_all_triggers == all_triggers:
+        return descriptions_cache
 
     # Files we loaded for missing descriptions
     loaded: dict[str, JSON_TYPE] = {}
@@ -597,25 +594,19 @@ async def async_get_all_descriptions(
             _load_triggers_files, hass, integrations
         )
 
-    # Build response
-    descriptions: dict[str, dict[str, Any]] = {}
-    for trigger_name, domain in triggers.items():
-        description = descriptions_cache.get(trigger_name)
-        if description is not None:
-            descriptions[trigger_name] = description
-            continue
-
+    # Add missing descriptions to the cache
+    for missing_trigger in missing_triggers:
+        domain = triggers[missing_trigger]
         # Cache missing descriptions
         domain_yaml = loaded.get(domain) or {}
         yaml_description = (
-            domain_yaml.get(trigger_name) or {}  # type: ignore[union-attr]
+            domain_yaml.get(missing_trigger) or {}  # type: ignore[union-attr]
         )
         description = {"fields": yaml_description.get("fields", {})}
-        descriptions_cache[trigger_name] = description
-        descriptions[trigger_name] = description
+        descriptions_cache[missing_trigger] = description
 
-    hass.data[ALL_TRIGGER_DESCRIPTIONS_CACHE] = descriptions
-    return descriptions
+    return descriptions_cache
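The two hunks above fold the separate all-descriptions cache into the per-trigger descriptions_cache: since that dict is keyed by trigger name, it is complete exactly when its key set equals the set of known triggers, so the second HassKey cache was redundant. A condensed sketch of the resulting logic, with standalone names assumed for illustration (the real code lives in async_get_all_descriptions and loads YAML via executor jobs):

    from typing import Any, Callable

    def get_all_descriptions(
        descriptions_cache: dict[str, dict[str, Any]],
        triggers: dict[str, str],  # trigger name -> integration domain
        load_fields: Callable[[str, str], dict[str, Any]],  # assumed YAML loader
    ) -> dict[str, dict[str, Any]]:
        all_triggers = set(triggers)
        # The per-trigger cache is complete iff it covers every known trigger.
        if set(descriptions_cache) == all_triggers:
            return descriptions_cache
        # Only fill in what is missing; previously cached entries are kept.
        for missing in all_triggers - set(descriptions_cache):
            descriptions_cache[missing] = {
                "fields": load_fields(triggers[missing], missing)
            }
        return descriptions_cache

Returning the cache dict itself, rather than rebuilding a response dict on every call, is what makes the second cache unnecessary.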

View File

@@ -182,7 +182,10 @@ def validate_triggers(config: Config, integration: Integration) -> None:  # noqa
         except KeyError:
             integration.add_error(
                 "triggers",
-                f"Trigger {trigger_name} has a field {field_name} with no name {error_msg_suffix}",
+                (
+                    f"Trigger {trigger_name} has a field {field_name} with no "
+                    f"name {error_msg_suffix}"
+                ),
             )
 
         if "description" not in field_schema and integration.core:
@@ -193,7 +196,10 @@ def validate_triggers(config: Config, integration: Integration) -> None:  # noqa
         except KeyError:
             integration.add_error(
                 "triggers",
-                f"Trigger {trigger_name} has a field {field_name} with no description {error_msg_suffix}",
+                (
+                    f"Trigger {trigger_name} has a field {field_name} with no "
+                    f"description {error_msg_suffix}"
+                ),
             )
 
         if "selector" in field_schema:

View File

@@ -694,10 +694,10 @@ async def test_subscribe_triggers(
     assert msg == {"event": {"sun": {"fields": {}}}, "id": 1, "type": "event"}
 
     # Test we receive an event when a new platform is loaded
-    assert await async_setup_component(hass, "persistent_notification", {})
+    assert await async_setup_component(hass, "tag", {})
     msg = await websocket_client.receive_json()
     assert msg == {
-        "event": {"persistent_notification": {"fields": {}}},
+        "event": {"tag": {"fields": {}}},
         "id": 1,
         "type": "event",
     }