Mirror of https://github.com/home-assistant/core.git
Synced 2026-01-18 05:26:52 +01:00

Compare commits: supportpac...matter_mut (18 commits)
| SHA1 |
|---|
| 253b32abd6 |
| cc20072c86 |
| f86db56d48 |
| 3e2ebb8ebb |
| 6e7b206788 |
| cee007b0b0 |
| bd24c27bc9 |
| 49bd26da86 |
| 49c42b9ad0 |
| 411491dc45 |
| 47383a499e |
| f9aa307cb2 |
| 7c6a31861e |
| b2b25ca28c |
| ad9efab16a |
| e967d33911 |
| 86bacdbdde |
| 644a40674d |
.github/workflows/ci.yaml (vendored, 8 changed lines)
@@ -247,17 +247,11 @@ jobs:
&& github.event.inputs.audit-licenses-only != 'true'
steps:
- *checkout
- name: Register yamllint problem matcher
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
- name: Register check-json problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/check-json.json"
- name: Register check executables problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
- name: Register codespell problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/codespell.json"
- name: Run prek
uses: j178/prek-action@9d6a3097e0c1865ecce00cfb89fe80f2ee91b547 # v1.0.12
@@ -4,7 +4,7 @@
"owner": "check-executables-have-shebangs",
"pattern": [
{
"regexp": "^(.+):\\s(.+)$",
"regexp": "^(.+):\\s(marked executable but has no \\(or invalid\\) shebang!.*)$",
"file": 1,
"message": 2
}
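For context, a minimal sketch of what the tightened problem-matcher regexp captures versus the old catch-all pattern; the sample tool output line is an assumption, not copied from check-executables-have-shebangs itself.

```python
# Minimal sketch: exercise the tightened matcher regexp with plain `re`.
# The sample output line is an assumption for illustration only.
import re

pattern = re.compile(
    r"^(.+):\s(marked executable but has no \(or invalid\) shebang!.*)$"
)

sample = "scripts/run.sh: marked executable but has no (or invalid) shebang!"
noise = "scripts/run.sh: some unrelated message"

match = pattern.match(sample)
assert match is not None
print(match.group(1))  # file    -> scripts/run.sh
print(match.group(2))  # message -> marked executable but has no (or invalid) shebang!

# The previous, broader "^(.+):\s(.+)$" would also have matched unrelated lines:
assert pattern.match(noise) is None
```

Anchoring on the literal message text keeps the matcher from turning arbitrary colon-separated output into CI annotations.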
@@ -19,7 +19,6 @@ import attr
|
||||
from hass_nabucasa import AlreadyConnectedError, Cloud, auth
|
||||
from hass_nabucasa.const import STATE_DISCONNECTED
|
||||
from hass_nabucasa.voice_data import TTS_VOICES
|
||||
import psutil_home_assistant as ha_psutil
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
@@ -28,7 +27,6 @@ from homeassistant.components.alexa import (
|
||||
errors as alexa_errors,
|
||||
)
|
||||
from homeassistant.components.google_assistant import helpers as google_helpers
|
||||
from homeassistant.components.hassio import get_addons_stats, get_supervisor_info
|
||||
from homeassistant.components.homeassistant import exposed_entities
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
@@ -39,7 +37,6 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.loader import (
|
||||
async_get_custom_components,
|
||||
async_get_loaded_integration,
|
||||
@@ -574,11 +571,6 @@ class DownloadSupportPackageView(HomeAssistantView):
|
||||
"</details>\n\n"
|
||||
)
|
||||
|
||||
markdown += await self._get_host_resources_markdown(hass)
|
||||
|
||||
if is_hassio(hass):
|
||||
markdown += await self._get_addon_resources_markdown(hass)
|
||||
|
||||
log_handler = hass.data[DATA_CLOUD_LOG_HANDLER]
|
||||
logs = "\n".join(await log_handler.get_logs(hass))
|
||||
markdown += (
|
||||
@@ -592,103 +584,6 @@ class DownloadSupportPackageView(HomeAssistantView):
|
||||
|
||||
return markdown
|
||||
|
||||
async def _get_host_resources_markdown(self, hass: HomeAssistant) -> str:
|
||||
"""Get host resource usage markdown using psutil."""
|
||||
|
||||
def _collect_system_stats() -> dict[str, Any]:
|
||||
"""Collect system stats."""
|
||||
psutil_wrapper = ha_psutil.PsutilWrapper()
|
||||
psutil_mod = psutil_wrapper.psutil
|
||||
|
||||
cpu_percent = psutil_mod.cpu_percent(interval=0.1)
|
||||
memory = psutil_mod.virtual_memory()
|
||||
disk = psutil_mod.disk_usage("/")
|
||||
|
||||
return {
|
||||
"cpu_percent": cpu_percent,
|
||||
"memory_total": memory.total,
|
||||
"memory_used": memory.used,
|
||||
"memory_available": memory.available,
|
||||
"memory_percent": memory.percent,
|
||||
"disk_total": disk.total,
|
||||
"disk_used": disk.used,
|
||||
"disk_free": disk.free,
|
||||
"disk_percent": disk.percent,
|
||||
}
|
||||
|
||||
markdown = ""
|
||||
try:
|
||||
stats = await hass.async_add_executor_job(_collect_system_stats)
|
||||
|
||||
markdown += "## Host resource usage\n\n"
|
||||
markdown += "Resource | Value\n"
|
||||
markdown += "--- | ---\n"
|
||||
|
||||
markdown += f"CPU usage | {stats['cpu_percent']}%\n"
|
||||
|
||||
memory_total_gb = round(stats["memory_total"] / (1024**3), 2)
|
||||
memory_used_gb = round(stats["memory_used"] / (1024**3), 2)
|
||||
memory_available_gb = round(stats["memory_available"] / (1024**3), 2)
|
||||
markdown += f"Memory total | {memory_total_gb} GB\n"
|
||||
markdown += (
|
||||
f"Memory used | {memory_used_gb} GB ({stats['memory_percent']}%)\n"
|
||||
)
|
||||
markdown += f"Memory available | {memory_available_gb} GB\n"
|
||||
|
||||
disk_total_gb = round(stats["disk_total"] / (1024**3), 2)
|
||||
disk_used_gb = round(stats["disk_used"] / (1024**3), 2)
|
||||
disk_free_gb = round(stats["disk_free"] / (1024**3), 2)
|
||||
markdown += f"Disk total | {disk_total_gb} GB\n"
|
||||
markdown += f"Disk used | {disk_used_gb} GB ({stats['disk_percent']}%)\n"
|
||||
markdown += f"Disk free | {disk_free_gb} GB\n"
|
||||
|
||||
markdown += "\n"
|
||||
except Exception: # noqa: BLE001
|
||||
# Broad exception catch for robustness in support package generation
|
||||
markdown += "## Host resource usage\n\n"
|
||||
markdown += "Unable to collect host resource information\n\n"
|
||||
|
||||
return markdown
|
||||
|
||||
async def _get_addon_resources_markdown(self, hass: HomeAssistant) -> str:
|
||||
"""Get add-on resource usage markdown for hassio."""
|
||||
markdown = ""
|
||||
try:
|
||||
supervisor_info = get_supervisor_info(hass) or {}
|
||||
addons_stats = get_addons_stats(hass)
|
||||
addons = supervisor_info.get("addons", [])
|
||||
|
||||
if addons:
|
||||
markdown += "## Add-on resource usage\n\n"
|
||||
markdown += "<details><summary>Add-on resources</summary>\n\n"
|
||||
markdown += "Add-on | Version | State | CPU | Memory\n"
|
||||
markdown += "--- | --- | --- | --- | ---\n"
|
||||
|
||||
for addon in addons:
|
||||
slug = addon.get("slug", "unknown")
|
||||
name = addon.get("name", slug)
|
||||
version = addon.get("version", "unknown")
|
||||
state = addon.get("state", "unknown")
|
||||
|
||||
addon_stats = addons_stats.get(slug, {})
|
||||
cpu = addon_stats.get("cpu_percent")
|
||||
memory = addon_stats.get("memory_percent")
|
||||
|
||||
cpu_str = f"{cpu}%" if cpu is not None else "N/A"
|
||||
memory_str = f"{memory}%" if memory is not None else "N/A"
|
||||
|
||||
markdown += (
|
||||
f"{name} | {version} | {state} | {cpu_str} | {memory_str}\n"
|
||||
)
|
||||
|
||||
markdown += "\n</details>\n\n"
|
||||
except Exception: # noqa: BLE001
|
||||
# Broad exception catch for robustness in support package generation
|
||||
markdown += "## Add-on resource usage\n\n"
|
||||
markdown += "Unable to collect add-on resource information\n\n"
|
||||
|
||||
return markdown
|
||||
|
||||
async def get(self, request: web.Request) -> web.Response:
|
||||
"""Download support package file."""
|
||||
|
||||
|
||||
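As a quick sanity check of the bytes-to-GB rounding used in `_get_host_resources_markdown` above (illustrative numbers, not real host data):

```python
# Illustrative check of the rounding used for the host resource table.
memory_total = 17_179_869_184  # 16 GiB reported by psutil, in bytes (made-up value)
memory_total_gb = round(memory_total / (1024**3), 2)
print(f"Memory total | {memory_total_gb} GB")  # -> Memory total | 16.0 GB
```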
@@ -5,8 +5,7 @@
"alexa",
"assist_pipeline",
"backup",
"google_assistant",
"hassio"
"google_assistant"
],
"codeowners": ["@home-assistant/cloud"],
"dependencies": ["auth", "http", "repairs", "webhook", "web_rtc"],
@@ -14,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.9.0", "psutil-home-assistant==0.0.1"],
"requirements": ["hass-nabucasa==1.9.0"],
"single_config_entry": true
}
@@ -49,11 +49,11 @@ def setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Concord232 alarm control panel platform."""
|
||||
name = config[CONF_NAME]
|
||||
code = config.get(CONF_CODE)
|
||||
mode = config[CONF_MODE]
|
||||
host = config[CONF_HOST]
|
||||
port = config[CONF_PORT]
|
||||
name: str = config[CONF_NAME]
|
||||
code: str | None = config.get(CONF_CODE)
|
||||
mode: str = config[CONF_MODE]
|
||||
host: str = config[CONF_HOST]
|
||||
port: int = config[CONF_PORT]
|
||||
|
||||
url = f"http://{host}:{port}"
|
||||
|
||||
@@ -72,7 +72,7 @@ class Concord232Alarm(AlarmControlPanelEntity):
|
||||
| AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
)
|
||||
|
||||
def __init__(self, url, name, code, mode):
|
||||
def __init__(self, url: str, name: str, code: str | None, mode: str) -> None:
|
||||
"""Initialize the Concord232 alarm panel."""
|
||||
|
||||
self._attr_name = name
|
||||
@@ -125,7 +125,7 @@ class Concord232Alarm(AlarmControlPanelEntity):
|
||||
return
|
||||
self._alarm.arm("away")
|
||||
|
||||
def _validate_code(self, code, state):
|
||||
def _validate_code(self, code: str | None, state: AlarmControlPanelState) -> bool:
|
||||
"""Validate given code."""
|
||||
if self._code is None:
|
||||
return True
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from concord232 import client as concord232_client
|
||||
import requests
|
||||
@@ -29,8 +30,7 @@ CONF_ZONE_TYPES = "zone_types"
|
||||
|
||||
DEFAULT_HOST = "localhost"
|
||||
DEFAULT_NAME = "Alarm"
|
||||
DEFAULT_PORT = "5007"
|
||||
DEFAULT_SSL = False
|
||||
DEFAULT_PORT = 5007
|
||||
|
||||
SCAN_INTERVAL = datetime.timedelta(seconds=10)
|
||||
|
||||
@@ -56,10 +56,10 @@ def setup_platform(
|
||||
) -> None:
|
||||
"""Set up the Concord232 binary sensor platform."""
|
||||
|
||||
host = config[CONF_HOST]
|
||||
port = config[CONF_PORT]
|
||||
exclude = config[CONF_EXCLUDE_ZONES]
|
||||
zone_types = config[CONF_ZONE_TYPES]
|
||||
host: str = config[CONF_HOST]
|
||||
port: int = config[CONF_PORT]
|
||||
exclude: list[int] = config[CONF_EXCLUDE_ZONES]
|
||||
zone_types: dict[int, BinarySensorDeviceClass] = config[CONF_ZONE_TYPES]
|
||||
sensors = []
|
||||
|
||||
try:
|
||||
@@ -84,7 +84,6 @@ def setup_platform(
|
||||
if zone["number"] not in exclude:
|
||||
sensors.append(
|
||||
Concord232ZoneSensor(
|
||||
hass,
|
||||
client,
|
||||
zone,
|
||||
zone_types.get(zone["number"], get_opening_type(zone)),
|
||||
@@ -110,26 +109,25 @@ def get_opening_type(zone):
|
||||
class Concord232ZoneSensor(BinarySensorEntity):
|
||||
"""Representation of a Concord232 zone as a sensor."""
|
||||
|
||||
def __init__(self, hass, client, zone, zone_type):
|
||||
def __init__(
|
||||
self,
|
||||
client: concord232_client.Client,
|
||||
zone: dict[str, Any],
|
||||
zone_type: BinarySensorDeviceClass,
|
||||
) -> None:
|
||||
"""Initialize the Concord232 binary sensor."""
|
||||
self._hass = hass
|
||||
self._client = client
|
||||
self._zone = zone
|
||||
self._number = zone["number"]
|
||||
self._zone_type = zone_type
|
||||
self._attr_device_class = zone_type
|
||||
|
||||
@property
|
||||
def device_class(self) -> BinarySensorDeviceClass:
|
||||
"""Return the class of this sensor, from DEVICE_CLASSES."""
|
||||
return self._zone_type
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
"""Return the name of the binary sensor."""
|
||||
return self._zone["name"]
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
# True means "faulted" or "open" or "abnormal state"
|
||||
return bool(self._zone["state"] != "Normal")
|
||||
@@ -145,5 +143,5 @@ class Concord232ZoneSensor(BinarySensorEntity):
|
||||
|
||||
if hasattr(self._client, "zones"):
|
||||
self._zone = next(
|
||||
(x for x in self._client.zones if x["number"] == self._number), None
|
||||
x for x in self._client.zones if x["number"] == self._number
|
||||
)
|
||||
|
||||
@@ -18,6 +18,7 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, discovery
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -35,7 +36,7 @@ DEFAULT_REPORT_SERVER_PORT = 52010
|
||||
DEFAULT_VERSION = "GATE-01"
|
||||
DOMAIN = "egardia"
|
||||
|
||||
EGARDIA_DEVICE = "egardiadevice"
|
||||
EGARDIA_DEVICE: HassKey[egardiadevice.EgardiaDevice] = HassKey(DOMAIN)
|
||||
EGARDIA_NAME = "egardianame"
|
||||
EGARDIA_REPORT_SERVER_CODES = "egardia_rs_codes"
|
||||
EGARDIA_REPORT_SERVER_ENABLED = "egardia_rs_enabled"
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from pythonegardia.egardiadevice import EgardiaDevice
|
||||
import requests
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
@@ -11,6 +12,7 @@ from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelEntityFeature,
|
||||
AlarmControlPanelState,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
@@ -47,10 +49,10 @@ def setup_platform(
|
||||
if discovery_info is None:
|
||||
return
|
||||
device = EgardiaAlarm(
|
||||
discovery_info["name"],
|
||||
discovery_info[CONF_NAME],
|
||||
hass.data[EGARDIA_DEVICE],
|
||||
discovery_info[CONF_REPORT_SERVER_ENABLED],
|
||||
discovery_info.get(CONF_REPORT_SERVER_CODES),
|
||||
discovery_info[CONF_REPORT_SERVER_CODES],
|
||||
discovery_info[CONF_REPORT_SERVER_PORT],
|
||||
)
|
||||
|
||||
@@ -67,8 +69,13 @@ class EgardiaAlarm(AlarmControlPanelEntity):
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self, name, egardiasystem, rs_enabled=False, rs_codes=None, rs_port=52010
|
||||
):
|
||||
self,
|
||||
name: str,
|
||||
egardiasystem: EgardiaDevice,
|
||||
rs_enabled: bool,
|
||||
rs_codes: dict[str, list[str]],
|
||||
rs_port: int,
|
||||
) -> None:
|
||||
"""Initialize the Egardia alarm."""
|
||||
self._attr_name = name
|
||||
self._egardiasystem = egardiasystem
|
||||
@@ -85,9 +92,7 @@ class EgardiaAlarm(AlarmControlPanelEntity):
|
||||
@property
|
||||
def should_poll(self) -> bool:
|
||||
"""Poll if no report server is enabled."""
|
||||
if not self._rs_enabled:
|
||||
return True
|
||||
return False
|
||||
return not self._rs_enabled
|
||||
|
||||
def handle_status_event(self, event):
|
||||
"""Handle the Egardia system status event."""
|
||||
|
||||
@@ -2,11 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pythonegardia.egardiadevice import EgardiaDevice
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
@@ -51,30 +52,20 @@ async def async_setup_platform(
|
||||
class EgardiaBinarySensor(BinarySensorEntity):
|
||||
"""Represents a sensor based on an Egardia sensor (IR, Door Contact)."""
|
||||
|
||||
def __init__(self, sensor_id, name, egardia_system, device_class):
|
||||
def __init__(
|
||||
self,
|
||||
sensor_id: str,
|
||||
name: str,
|
||||
egardia_system: EgardiaDevice,
|
||||
device_class: BinarySensorDeviceClass | None,
|
||||
) -> None:
|
||||
"""Initialize the sensor device."""
|
||||
self._id = sensor_id
|
||||
self._name = name
|
||||
self._state = None
|
||||
self._device_class = device_class
|
||||
self._attr_name = name
|
||||
self._attr_device_class = device_class
|
||||
self._egardia_system = egardia_system
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the status."""
|
||||
egardia_input = self._egardia_system.getsensorstate(self._id)
|
||||
self._state = STATE_ON if egardia_input else STATE_OFF
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the device."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Whether the device is switched on."""
|
||||
return self._state == STATE_ON
|
||||
|
||||
@property
|
||||
def device_class(self) -> BinarySensorDeviceClass | None:
|
||||
"""Return the device class."""
|
||||
return self._device_class
|
||||
self._attr_is_on = bool(egardia_input)
|
||||
|
||||
@@ -23,5 +23,5 @@
"winter_mode": {}
},
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20260107.1"]
"requirements": ["home-assistant-frontend==20260107.2"]
}
@@ -13,7 +13,7 @@
"requirements": [
"xknx==3.14.0",
"xknxproject==3.8.2",
"knx-frontend==2025.12.30.151231"
"knx-frontend==2026.1.15.112308"
],
"single_config_entry": true
}
@@ -18,7 +18,11 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_get_custom_components
|
||||
|
||||
from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
|
||||
from .helpers import async_is_preview_feature_enabled, async_listen
|
||||
from .helpers import (
|
||||
async_is_preview_feature_enabled,
|
||||
async_listen,
|
||||
async_update_preview_feature,
|
||||
)
|
||||
from .models import (
|
||||
EventLabsUpdatedData,
|
||||
LabPreviewFeature,
|
||||
@@ -37,6 +41,7 @@ __all__ = [
|
||||
"EventLabsUpdatedData",
|
||||
"async_is_preview_feature_enabled",
|
||||
"async_listen",
|
||||
"async_update_preview_feature",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -61,3 +61,32 @@ def async_listen(
|
||||
listener()
|
||||
|
||||
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
|
||||
|
||||
|
||||
async def async_update_preview_feature(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
preview_feature: str,
|
||||
enabled: bool,
|
||||
) -> None:
|
||||
"""Update a lab preview feature state."""
|
||||
labs_data = hass.data[LABS_DATA]
|
||||
|
||||
preview_feature_id = f"{domain}.{preview_feature}"
|
||||
|
||||
if preview_feature_id not in labs_data.preview_features:
|
||||
raise ValueError(f"Preview feature {preview_feature_id} not found")
|
||||
|
||||
if enabled:
|
||||
labs_data.data.preview_feature_status.add((domain, preview_feature))
|
||||
else:
|
||||
labs_data.data.preview_feature_status.discard((domain, preview_feature))
|
||||
|
||||
await labs_data.store.async_save(labs_data.data.to_store_format())
|
||||
|
||||
event_data: EventLabsUpdatedData = {
|
||||
"domain": domain,
|
||||
"preview_feature": preview_feature,
|
||||
"enabled": enabled,
|
||||
}
|
||||
hass.bus.async_fire(EVENT_LABS_UPDATED, event_data)
|
||||
|
||||
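A rough usage sketch of the new `async_update_preview_feature` helper; the domain and feature names below are hypothetical, while the signature, error behaviour, and the package-level re-export are taken from the code above.

```python
# Rough usage sketch; "my_domain"/"new_dashboard" are made-up names.
# Import path assumes the re-export added to the labs package __all__ above.
from homeassistant.components.labs import async_update_preview_feature
from homeassistant.core import HomeAssistant


async def enable_preview(hass: HomeAssistant) -> None:
    try:
        # Persists the change and fires EVENT_LABS_UPDATED for registered listeners.
        await async_update_preview_feature(hass, "my_domain", "new_dashboard", True)
    except ValueError:
        # Raised when "my_domain.new_dashboard" is not a registered preview feature.
        pass
```

Moving this logic out of the websocket handler lets other callers (like the websocket command below) toggle preview features through one code path.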
@@ -8,12 +8,14 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.backup import async_get_manager
|
||||
from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import LABS_DATA
|
||||
from .helpers import async_is_preview_feature_enabled, async_listen
|
||||
from .models import EventLabsUpdatedData
|
||||
from .helpers import (
|
||||
async_is_preview_feature_enabled,
|
||||
async_listen,
|
||||
async_update_preview_feature,
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -95,19 +97,7 @@ async def websocket_update_preview_feature(
|
||||
)
|
||||
return
|
||||
|
||||
if enabled:
|
||||
labs_data.data.preview_feature_status.add((domain, preview_feature))
|
||||
else:
|
||||
labs_data.data.preview_feature_status.discard((domain, preview_feature))
|
||||
|
||||
await labs_data.store.async_save(labs_data.data.to_store_format())
|
||||
|
||||
event_data: EventLabsUpdatedData = {
|
||||
"domain": domain,
|
||||
"preview_feature": preview_feature,
|
||||
"enabled": enabled,
|
||||
}
|
||||
hass.bus.async_fire(EVENT_LABS_UPDATED, event_data)
|
||||
await async_update_preview_feature(hass, domain, preview_feature, enabled)
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ from .coordinator import MastodonConfigEntry, MastodonCoordinator, MastodonData
|
||||
from .services import async_setup_services
|
||||
from .utils import construct_mastodon_username, create_mastodon_client
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
|
||||
homeassistant/components/mastodon/binary_sensor.py (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
"""Binary sensor platform for the Mastodon integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from enum import StrEnum
|
||||
|
||||
from mastodon.Mastodon import Account
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import MastodonConfigEntry
|
||||
from .entity import MastodonEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
class MastodonBinarySensor(StrEnum):
|
||||
"""Mastodon binary sensors."""
|
||||
|
||||
BOT = "bot"
|
||||
SUSPENDED = "suspended"
|
||||
DISCOVERABLE = "discoverable"
|
||||
LOCKED = "locked"
|
||||
INDEXABLE = "indexable"
|
||||
LIMITED = "limited"
|
||||
MEMORIAL = "memorial"
|
||||
MOVED = "moved"
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class MastodonBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Mastodon binary sensor description."""
|
||||
|
||||
is_on_fn: Callable[[Account], bool | None]
|
||||
|
||||
|
||||
ENTITY_DESCRIPTIONS: tuple[MastodonBinarySensorEntityDescription, ...] = (
|
||||
MastodonBinarySensorEntityDescription(
|
||||
key=MastodonBinarySensor.BOT,
|
||||
translation_key=MastodonBinarySensor.BOT,
|
||||
is_on_fn=lambda account: account.bot,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
MastodonBinarySensorEntityDescription(
|
||||
key=MastodonBinarySensor.DISCOVERABLE,
|
||||
translation_key=MastodonBinarySensor.DISCOVERABLE,
|
||||
is_on_fn=lambda account: account.discoverable,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
MastodonBinarySensorEntityDescription(
|
||||
key=MastodonBinarySensor.LOCKED,
|
||||
translation_key=MastodonBinarySensor.LOCKED,
|
||||
is_on_fn=lambda account: account.locked,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
MastodonBinarySensorEntityDescription(
|
||||
key=MastodonBinarySensor.MOVED,
|
||||
translation_key=MastodonBinarySensor.MOVED,
|
||||
is_on_fn=lambda account: account.moved is not None,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
MastodonBinarySensorEntityDescription(
|
||||
key=MastodonBinarySensor.INDEXABLE,
|
||||
translation_key=MastodonBinarySensor.INDEXABLE,
|
||||
is_on_fn=lambda account: account.indexable,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
MastodonBinarySensorEntityDescription(
|
||||
key=MastodonBinarySensor.LIMITED,
|
||||
translation_key=MastodonBinarySensor.LIMITED,
|
||||
is_on_fn=lambda account: account.limited is True,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
MastodonBinarySensorEntityDescription(
|
||||
key=MastodonBinarySensor.MEMORIAL,
|
||||
translation_key=MastodonBinarySensor.MEMORIAL,
|
||||
is_on_fn=lambda account: account.memorial is True,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
MastodonBinarySensorEntityDescription(
|
||||
key=MastodonBinarySensor.SUSPENDED,
|
||||
translation_key=MastodonBinarySensor.SUSPENDED,
|
||||
is_on_fn=lambda account: account.suspended is True,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: MastodonConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the binary sensor platform."""
|
||||
coordinator = entry.runtime_data.coordinator
|
||||
|
||||
async_add_entities(
|
||||
MastodonBinarySensorEntity(
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
data=entry,
|
||||
)
|
||||
for entity_description in ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class MastodonBinarySensorEntity(MastodonEntity, BinarySensorEntity):
|
||||
"""Mastodon binary sensor entity."""
|
||||
|
||||
entity_description: MastodonBinarySensorEntityDescription
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.entity_description.is_on_fn(self.coordinator.data)
|
||||
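A small sketch of how the `is_on_fn` callbacks above map account fields to entity state; the `SimpleNamespace` stand-in is an assumption used in place of a real `mastodon.Mastodon.Account`.

```python
# Sketch of how a description's is_on_fn drives the binary sensor state.
# SimpleNamespace is a stand-in for mastodon.Mastodon.Account (assumption).
from types import SimpleNamespace

from homeassistant.components.mastodon.binary_sensor import (
    ENTITY_DESCRIPTIONS,
    MastodonBinarySensor,
)

account = SimpleNamespace(bot=True, moved=None)

bot_desc = next(d for d in ENTITY_DESCRIPTIONS if d.key == MastodonBinarySensor.BOT)
moved_desc = next(d for d in ENTITY_DESCRIPTIONS if d.key == MastodonBinarySensor.MOVED)

print(bot_desc.is_on_fn(account))    # True  -> "Bot" sensor is on
print(moved_desc.is_on_fn(account))  # False -> account.moved is None
```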
@@ -1,5 +1,18 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"bot": { "default": "mdi:robot" },
|
||||
"discoverable": { "default": "mdi:magnify-scan" },
|
||||
"indexable": { "default": "mdi:search-web" },
|
||||
"limited": { "default": "mdi:account-cancel" },
|
||||
"locked": {
|
||||
"default": "mdi:account-lock",
|
||||
"state": { "off": "mdi:account-lock-open" }
|
||||
},
|
||||
"memorial": { "default": "mdi:candle" },
|
||||
"moved": { "default": "mdi:truck-delivery" },
|
||||
"suspended": { "default": "mdi:account-off" }
|
||||
},
|
||||
"sensor": {
|
||||
"followers": {
|
||||
"default": "mdi:account-multiple"
|
||||
|
||||
@@ -26,6 +26,16 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"bot": { "name": "Bot" },
|
||||
"discoverable": { "name": "Discoverable" },
|
||||
"indexable": { "name": "Indexable" },
|
||||
"limited": { "name": "Limited" },
|
||||
"locked": { "name": "Locked" },
|
||||
"memorial": { "name": "Memorial" },
|
||||
"moved": { "name": "Moved" },
|
||||
"suspended": { "name": "Suspended" }
|
||||
},
|
||||
"sensor": {
|
||||
"followers": {
|
||||
"name": "Followers",
|
||||
|
||||
@@ -489,6 +489,7 @@ DISCOVERY_SCHEMAS = [
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=MatterBinarySensorEntityDescription(
|
||||
key="WindowCoveringConfigStatusOperational",
|
||||
translation_key="config_status_operational",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
# unset Operational bit from ConfigStatus bitmap means problem
|
||||
|
||||
@@ -56,6 +56,9 @@
|
||||
"boost_state": {
|
||||
"name": "Boost state"
|
||||
},
|
||||
"config_status_operational": {
|
||||
"name": "Configuration status"
|
||||
},
|
||||
"dishwasher_alarm_inflow": {
|
||||
"name": "Inflow alarm"
|
||||
},
|
||||
|
||||
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["aiomealie==1.1.1"]
"requirements": ["aiomealie==1.2.0"]
}
@@ -2,9 +2,10 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from operator import itemgetter
|
||||
from typing import Any
|
||||
|
||||
import oasatelematics
|
||||
import voluptuous as vol
|
||||
@@ -55,9 +56,9 @@ def setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the OASA Telematics sensor."""
|
||||
name = config[CONF_NAME]
|
||||
stop_id = config[CONF_STOP_ID]
|
||||
route_id = config.get(CONF_ROUTE_ID)
|
||||
name: str = config[CONF_NAME]
|
||||
stop_id: str = config[CONF_STOP_ID]
|
||||
route_id: str = config[CONF_ROUTE_ID]
|
||||
|
||||
data = OASATelematicsData(stop_id, route_id)
|
||||
|
||||
@@ -68,42 +69,31 @@ class OASATelematicsSensor(SensorEntity):
|
||||
"""Implementation of the OASA Telematics sensor."""
|
||||
|
||||
_attr_attribution = "Data retrieved from telematics.oasa.gr"
|
||||
_attr_device_class = SensorDeviceClass.TIMESTAMP
|
||||
_attr_icon = "mdi:bus"
|
||||
|
||||
def __init__(self, data, stop_id, route_id, name):
|
||||
def __init__(
|
||||
self, data: OASATelematicsData, stop_id: str, route_id: str, name: str
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.data = data
|
||||
self._name = name
|
||||
self._attr_name = name
|
||||
self._stop_id = stop_id
|
||||
self._route_id = route_id
|
||||
self._name_data = self._times = self._state = None
|
||||
self._name_data: dict[str, Any] | None = None
|
||||
self._times: list[dict[str, Any]] | None = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def device_class(self) -> SensorDeviceClass:
|
||||
"""Return the class of this sensor."""
|
||||
return SensorDeviceClass.TIMESTAMP
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
params = {}
|
||||
if self._times is not None:
|
||||
next_arrival_data = self._times[0]
|
||||
if ATTR_NEXT_ARRIVAL in next_arrival_data:
|
||||
next_arrival = next_arrival_data[ATTR_NEXT_ARRIVAL]
|
||||
next_arrival: datetime = next_arrival_data[ATTR_NEXT_ARRIVAL]
|
||||
params.update({ATTR_NEXT_ARRIVAL: next_arrival.isoformat()})
|
||||
if len(self._times) > 1:
|
||||
second_next_arrival_time = self._times[1][ATTR_NEXT_ARRIVAL]
|
||||
second_next_arrival_time: datetime = self._times[1][ATTR_NEXT_ARRIVAL]
|
||||
if second_next_arrival_time is not None:
|
||||
second_arrival = second_next_arrival_time
|
||||
params.update(
|
||||
@@ -115,12 +105,13 @@ class OASATelematicsSensor(SensorEntity):
|
||||
ATTR_STOP_ID: self._stop_id,
|
||||
}
|
||||
)
|
||||
params.update(
|
||||
{
|
||||
ATTR_ROUTE_NAME: self._name_data[ATTR_ROUTE_NAME],
|
||||
ATTR_STOP_NAME: self._name_data[ATTR_STOP_NAME],
|
||||
}
|
||||
)
|
||||
if self._name_data is not None:
|
||||
params.update(
|
||||
{
|
||||
ATTR_ROUTE_NAME: self._name_data[ATTR_ROUTE_NAME],
|
||||
ATTR_STOP_NAME: self._name_data[ATTR_STOP_NAME],
|
||||
}
|
||||
)
|
||||
return {k: v for k, v in params.items() if v}
|
||||
|
||||
def update(self) -> None:
|
||||
@@ -130,7 +121,7 @@ class OASATelematicsSensor(SensorEntity):
|
||||
self._name_data = self.data.name_data
|
||||
next_arrival_data = self._times[0]
|
||||
if ATTR_NEXT_ARRIVAL in next_arrival_data:
|
||||
self._state = next_arrival_data[ATTR_NEXT_ARRIVAL]
|
||||
self._attr_native_value = next_arrival_data[ATTR_NEXT_ARRIVAL]
|
||||
|
||||
|
||||
class OASATelematicsData:
|
||||
|
||||
@@ -259,7 +259,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> bo
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> bool:
|
||||
"""Unload OpenAI."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -280,7 +280,7 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
|
||||
if not any(entry.version == 1 for entry in entries):
|
||||
return
|
||||
|
||||
api_keys_entries: dict[str, tuple[ConfigEntry, bool]] = {}
|
||||
api_keys_entries: dict[str, tuple[OpenAIConfigEntry, bool]] = {}
|
||||
entity_registry = er.async_get(hass)
|
||||
device_registry = dr.async_get(hass)
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ from typing import TYPE_CHECKING
|
||||
from openai.types.responses.response_output_item import ImageGenerationCall
|
||||
|
||||
from homeassistant.components import ai_task, conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
@@ -35,7 +34,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: OpenAIConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up AI Task entities."""
|
||||
|
||||
@@ -24,9 +24,9 @@ from homeassistant.helpers.discovery import load_platform
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from .const import DATA_QUIKSWITCH, DOMAIN
|
||||
|
||||
DOMAIN = "qwikswitch"
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_DIMMER_ADJUST = "dimmer_adjust"
|
||||
CONF_BUTTON_EVENTS = "button_events"
|
||||
@@ -96,7 +96,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
if not await qsusb.update_from_devices():
|
||||
return False
|
||||
|
||||
hass.data[DOMAIN] = qsusb
|
||||
hass.data[DATA_QUIKSWITCH] = qsusb
|
||||
|
||||
comps: dict[Platform, list] = {
|
||||
Platform.SWITCH: [],
|
||||
@@ -168,7 +168,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
@callback
|
||||
def async_stop(_):
|
||||
"""Stop the listener."""
|
||||
hass.data[DOMAIN].stop()
|
||||
hass.data[DATA_QUIKSWITCH].stop()
|
||||
|
||||
hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, async_stop)
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import DOMAIN
|
||||
from .const import DATA_QUIKSWITCH, DOMAIN
|
||||
from .entity import QSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -30,7 +30,7 @@ async def async_setup_platform(
|
||||
if discovery_info is None:
|
||||
return
|
||||
|
||||
qsusb = hass.data[DOMAIN]
|
||||
qsusb = hass.data[DATA_QUIKSWITCH]
|
||||
_LOGGER.debug("Setup qwikswitch.binary_sensor %s, %s", qsusb, discovery_info)
|
||||
devs = [QSBinarySensor(sensor) for sensor in discovery_info[DOMAIN]]
|
||||
add_entities(devs)
|
||||
|
||||
homeassistant/components/qwikswitch/const.py (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
"""Support for Qwikswitch devices."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pyqwikswitch.async_ import QSUsb
|
||||
|
||||
DOMAIN = "qwikswitch"
|
||||
DATA_QUIKSWITCH: HassKey[QSUsb] = HassKey(DOMAIN)
|
||||
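The move from a plain `DOMAIN` string to a typed `HassKey` is what makes lookups like `hass.data[DATA_QUIKSWITCH]` type-checkable; a minimal sketch of the pattern with made-up names:

```python
# Minimal sketch of the HassKey pattern used above: the key carries the value
# type, so static checkers know what hass.data returns. "acme"/AcmeClient are
# made-up names, independent of Qwikswitch.
from homeassistant.core import HomeAssistant
from homeassistant.util.hass_dict import HassKey


class AcmeClient:
    def ping(self) -> bool:
        return True


DATA_ACME: HassKey[AcmeClient] = HassKey("acme")


def use_client(hass: HomeAssistant) -> bool:
    hass.data[DATA_ACME] = AcmeClient()
    client = hass.data[DATA_ACME]  # inferred as AcmeClient, not Any
    return client.ping()
```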
@@ -7,7 +7,7 @@ from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from . import DOMAIN
|
||||
from .const import DATA_QUIKSWITCH
|
||||
|
||||
|
||||
class QSEntity(Entity):
|
||||
@@ -67,8 +67,8 @@ class QSToggleEntity(QSEntity):
|
||||
async def async_turn_on(self, **kwargs):
|
||||
"""Turn the device on."""
|
||||
new = kwargs.get(ATTR_BRIGHTNESS, 255)
|
||||
self.hass.data[DOMAIN].devices.set_value(self.qsid, new)
|
||||
self.hass.data[DATA_QUIKSWITCH].devices.set_value(self.qsid, new)
|
||||
|
||||
async def async_turn_off(self, **_):
|
||||
"""Turn the device off."""
|
||||
self.hass.data[DOMAIN].devices.set_value(self.qsid, 0)
|
||||
self.hass.data[DATA_QUIKSWITCH].devices.set_value(self.qsid, 0)
|
||||
|
||||
@@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import DOMAIN
|
||||
from .const import DATA_QUIKSWITCH, DOMAIN
|
||||
from .entity import QSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -28,7 +28,7 @@ async def async_setup_platform(
|
||||
if discovery_info is None:
|
||||
return
|
||||
|
||||
qsusb = hass.data[DOMAIN]
|
||||
qsusb = hass.data[DATA_QUIKSWITCH]
|
||||
_LOGGER.debug("Setup qwikswitch.sensor %s, %s", qsusb, discovery_info)
|
||||
devs = [QSSensor(sensor) for sensor in discovery_info[DOMAIN]]
|
||||
add_entities(devs)
|
||||
|
||||
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import DOMAIN
|
||||
from .const import DATA_QUIKSWITCH, DOMAIN
|
||||
from .entity import QSToggleEntity
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ async def async_setup_platform(
|
||||
if discovery_info is None:
|
||||
return
|
||||
|
||||
qsusb = hass.data[DOMAIN]
|
||||
qsusb = hass.data[DATA_QUIKSWITCH]
|
||||
devs = [QSSwitch(qsid, qsusb) for qsid in discovery_info[DOMAIN]]
|
||||
add_entities(devs)
|
||||
|
||||
|
||||
@@ -144,6 +144,51 @@ class SmaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconf_entry = self._get_reconfigure_entry()
|
||||
if user_input is not None:
|
||||
errors, device_info = await self._handle_user_input(
|
||||
user_input={
|
||||
**reconf_entry.data,
|
||||
**user_input,
|
||||
}
|
||||
)
|
||||
|
||||
if not errors:
|
||||
await self.async_set_unique_id(
|
||||
str(device_info["serial"]), raise_on_progress=False
|
||||
)
|
||||
self._abort_if_unique_id_mismatch()
|
||||
return self.async_update_reload_and_abort(
|
||||
reconf_entry,
|
||||
data_updates={
|
||||
CONF_HOST: user_input[CONF_HOST],
|
||||
CONF_SSL: user_input[CONF_SSL],
|
||||
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
|
||||
CONF_GROUP: user_input[CONF_GROUP],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_SSL): cv.boolean,
|
||||
vol.Optional(CONF_VERIFY_SSL): cv.boolean,
|
||||
vol.Optional(CONF_GROUP): vol.In(GROUPS),
|
||||
}
|
||||
),
|
||||
suggested_values=user_input or dict(reconf_entry.data),
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
|
||||
@@ -3,7 +3,9 @@
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unique_id_mismatch": "You selected a different SMA device than the one this config entry was configured with, this is not allowed."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -29,6 +31,16 @@
|
||||
"description": "The SMA integration needs to re-authenticate your connection details",
|
||||
"title": "[%key:common::config_flow::title::reauth%]"
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"group": "[%key:component::sma::config::step::user::data::group%]",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"ssl": "[%key:common::config_flow::data::ssl%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"description": "Use the following form to reconfigure your SMA device.",
|
||||
"title": "Reconfigure SMA Solar Integration"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"group": "Group",
|
||||
@@ -44,5 +56,13 @@
|
||||
"title": "Set up SMA Solar"
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"group": {
|
||||
"options": {
|
||||
"installer": "Installer",
|
||||
"user": "User"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,265 +0,0 @@
|
||||
"""Audio helper for SmartThings audio notifications."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import contextlib
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
import secrets
|
||||
|
||||
from aiohttp import hdrs, web
|
||||
|
||||
from homeassistant.components import ffmpeg
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.network import NoURLAvailableError, get_url
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PCM_SAMPLE_RATE = 24000
|
||||
PCM_SAMPLE_WIDTH = 2
|
||||
PCM_CHANNELS = 1
|
||||
PCM_MIME = "audio/L16"
|
||||
PCM_EXTENSION = ".pcm"
|
||||
WARNING_DURATION_SECONDS = 40
|
||||
FFMPEG_MAX_DURATION_SECONDS = 10 * 60
|
||||
TRANSCODE_TIMEOUT_SECONDS = WARNING_DURATION_SECONDS + 10
|
||||
_TRUNCATION_EPSILON = 1 / PCM_SAMPLE_RATE
|
||||
ENTRY_TTL = timedelta(minutes=5)
|
||||
MAX_STORED_ENTRIES = 4 # Limit the number of cached notifications.
|
||||
|
||||
PCM_FRAME_BYTES = PCM_SAMPLE_WIDTH * PCM_CHANNELS
|
||||
|
||||
DATA_AUDIO_MANAGER = "audio_manager"
|
||||
|
||||
|
||||
class SmartThingsAudioError(HomeAssistantError):
|
||||
"""Error raised when SmartThings audio preparation fails."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class _AudioEntry:
|
||||
"""Stored PCM audio entry."""
|
||||
|
||||
pcm: bytes
|
||||
created: float
|
||||
expires: float
|
||||
|
||||
|
||||
class SmartThingsAudioManager(HomeAssistantView):
|
||||
"""Manage PCM proxy URLs for SmartThings audio notifications."""
|
||||
|
||||
url = "/api/smartthings/audio/{token}"
|
||||
name = "api:smartthings:audio"
|
||||
requires_auth = False
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the manager."""
|
||||
self.hass = hass
|
||||
self._entries: dict[str, _AudioEntry] = {}
|
||||
self._cleanup_handle: asyncio.TimerHandle | None = None
|
||||
|
||||
async def async_prepare_notification(self, source_url: str) -> str:
|
||||
"""Generate an externally accessible PCM URL for SmartThings."""
|
||||
pcm, duration, truncated = await self._transcode_to_pcm(source_url)
|
||||
if not pcm:
|
||||
raise SmartThingsAudioError("Converted audio is empty")
|
||||
|
||||
if truncated:
|
||||
_LOGGER.warning(
|
||||
"SmartThings audio notification truncated to %s seconds (output length %.1fs); longer sources may be cut off",
|
||||
FFMPEG_MAX_DURATION_SECONDS,
|
||||
duration,
|
||||
)
|
||||
elif duration > WARNING_DURATION_SECONDS:
|
||||
_LOGGER.warning(
|
||||
"SmartThings audio notification is %.1fs; playback over %s seconds may be cut off",
|
||||
duration,
|
||||
WARNING_DURATION_SECONDS,
|
||||
)
|
||||
|
||||
token = secrets.token_urlsafe(
|
||||
16
|
||||
) # Shorter tokens avoid playback issues in some devices.
|
||||
now = self.hass.loop.time()
|
||||
entry = _AudioEntry(
|
||||
pcm=pcm,
|
||||
created=now,
|
||||
expires=now + ENTRY_TTL.total_seconds(),
|
||||
)
|
||||
|
||||
self._cleanup(now)
|
||||
while token in self._entries:
|
||||
token = secrets.token_urlsafe(16)
|
||||
self._entries[token] = entry
|
||||
while len(self._entries) > MAX_STORED_ENTRIES:
|
||||
dropped_token = next(iter(self._entries))
|
||||
self._entries.pop(dropped_token, None)
|
||||
_LOGGER.debug(
|
||||
"Dropped oldest SmartThings audio token %s to cap cache",
|
||||
dropped_token,
|
||||
)
|
||||
self._schedule_cleanup()
|
||||
|
||||
path = f"/api/smartthings/audio/{token}{PCM_EXTENSION}"
|
||||
try:
|
||||
base_url = get_url(
|
||||
self.hass,
|
||||
allow_internal=True,
|
||||
allow_external=True,
|
||||
allow_cloud=True,
|
||||
prefer_external=False, # Prevent NAT loopback failures; may break non-local access for devices outside the LAN.
|
||||
prefer_cloud=True,
|
||||
)
|
||||
except NoURLAvailableError as err:
|
||||
self._entries.pop(token, None)
|
||||
self._schedule_cleanup()
|
||||
raise SmartThingsAudioError(
|
||||
"SmartThings audio notifications require an accessible Home Assistant URL"
|
||||
) from err
|
||||
|
||||
return f"{base_url}{path}"
|
||||
|
||||
async def get(self, request: web.Request, token: str) -> web.StreamResponse:
|
||||
"""Serve a PCM audio response."""
|
||||
token = token.removesuffix(PCM_EXTENSION)
|
||||
|
||||
now = self.hass.loop.time()
|
||||
self._cleanup(now)
|
||||
self._schedule_cleanup()
|
||||
entry = self._entries.get(token)
|
||||
|
||||
if entry is None:
|
||||
raise web.HTTPNotFound
|
||||
|
||||
_LOGGER.debug("Serving SmartThings audio token=%s to %s", token, request.remote)
|
||||
|
||||
response = web.Response(body=entry.pcm, content_type=PCM_MIME)
|
||||
response.headers[hdrs.CACHE_CONTROL] = "no-store"
|
||||
response.headers[hdrs.ACCEPT_RANGES] = "none"
|
||||
response.headers[hdrs.CONTENT_DISPOSITION] = (
|
||||
f'inline; filename="{token}{PCM_EXTENSION}"'
|
||||
)
|
||||
return response
|
||||
|
||||
async def _transcode_to_pcm(self, source_url: str) -> tuple[bytes, float, bool]:
|
||||
"""Use ffmpeg to convert the source media to 24kHz mono PCM."""
|
||||
manager = ffmpeg.get_ffmpeg_manager(self.hass)
|
||||
command = [
|
||||
manager.binary,
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"error",
|
||||
"-nostdin",
|
||||
"-i",
|
||||
source_url,
|
||||
"-ac",
|
||||
str(PCM_CHANNELS),
|
||||
"-ar",
|
||||
str(PCM_SAMPLE_RATE),
|
||||
"-c:a",
|
||||
"pcm_s16le",
|
||||
"-t",
|
||||
str(FFMPEG_MAX_DURATION_SECONDS),
|
||||
"-f",
|
||||
"s16le",
|
||||
"pipe:1",
|
||||
]
|
||||
|
||||
try:
|
||||
process = await asyncio.create_subprocess_exec(
|
||||
*command,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.PIPE,
|
||||
)
|
||||
except FileNotFoundError as err:
|
||||
raise SmartThingsAudioError(
|
||||
"FFmpeg is required for SmartThings audio notifications"
|
||||
) from err
|
||||
|
||||
try:
|
||||
stdout, stderr = await asyncio.wait_for(
|
||||
process.communicate(), timeout=TRANSCODE_TIMEOUT_SECONDS
|
||||
)
|
||||
except TimeoutError:
|
||||
_LOGGER.warning(
|
||||
"FFmpeg timed out after %s seconds while converting SmartThings audio from %s",
|
||||
TRANSCODE_TIMEOUT_SECONDS,
|
||||
source_url,
|
||||
)
|
||||
with contextlib.suppress(ProcessLookupError):
|
||||
process.kill()
|
||||
stdout, stderr = await process.communicate()
|
||||
|
||||
if process.returncode != 0:
|
||||
message = stderr.decode().strip() or "unknown error"
|
||||
_LOGGER.error(
|
||||
"FFmpeg failed to convert SmartThings audio from %s: %s",
|
||||
source_url,
|
||||
message,
|
||||
)
|
||||
raise SmartThingsAudioError(
|
||||
"Unable to convert audio to PCM for SmartThings"
|
||||
)
|
||||
|
||||
if not stdout:
|
||||
return b"", 0.0, False
|
||||
|
||||
frame_count, remainder = divmod(len(stdout), PCM_FRAME_BYTES)
|
||||
if remainder:
|
||||
_LOGGER.debug(
|
||||
"SmartThings audio conversion produced misaligned PCM: dropping %s extra byte(s)",
|
||||
remainder,
|
||||
)
|
||||
stdout = stdout[: len(stdout) - remainder]
|
||||
frame_count = len(stdout) // PCM_FRAME_BYTES
|
||||
|
||||
if frame_count == 0:
|
||||
return b"", 0.0, False
|
||||
|
||||
duration = frame_count / PCM_SAMPLE_RATE
|
||||
truncated = duration >= (FFMPEG_MAX_DURATION_SECONDS - _TRUNCATION_EPSILON)
|
||||
return stdout, duration, truncated
|
||||
|
||||
@callback
|
||||
def _schedule_cleanup(self) -> None:
|
||||
"""Schedule the next cleanup based on entry expiry."""
|
||||
if self._cleanup_handle is not None:
|
||||
self._cleanup_handle.cancel()
|
||||
self._cleanup_handle = None
|
||||
if not self._entries:
|
||||
return
|
||||
next_expiry = min(entry.expires for entry in self._entries.values())
|
||||
delay = max(0.0, next_expiry - self.hass.loop.time())
|
||||
self._cleanup_handle = self.hass.loop.call_later(delay, self._cleanup_callback)
|
||||
|
||||
@callback
|
||||
def _cleanup_callback(self) -> None:
|
||||
"""Run a cleanup pass."""
|
||||
self._cleanup_handle = None
|
||||
now = self.hass.loop.time()
|
||||
self._cleanup(now)
|
||||
self._schedule_cleanup()
|
||||
|
||||
def _cleanup(self, now: float) -> None:
|
||||
"""Remove expired entries."""
|
||||
expired = [
|
||||
token for token, entry in self._entries.items() if entry.expires <= now
|
||||
]
|
||||
for token in expired:
|
||||
self._entries.pop(token, None)
|
||||
|
||||
|
||||
async def async_get_audio_manager(hass: HomeAssistant) -> SmartThingsAudioManager:
|
||||
"""Return the shared SmartThings audio manager."""
|
||||
domain_data = hass.data.setdefault(DOMAIN, {})
|
||||
if (manager := domain_data.get(DATA_AUDIO_MANAGER)) is None:
|
||||
manager = SmartThingsAudioManager(hass)
|
||||
hass.http.register_view(manager)
|
||||
domain_data[DATA_AUDIO_MANAGER] = manager
|
||||
return manager
|
||||
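For reference, a worked example of the PCM bookkeeping in the removed helper: at 24 kHz, 16-bit mono, each frame is 2 bytes, so duration is frame count divided by the sample rate (the byte count below is illustrative).

```python
# Worked example of the PCM duration math from the removed _transcode_to_pcm.
PCM_SAMPLE_RATE = 24000
PCM_SAMPLE_WIDTH = 2
PCM_CHANNELS = 1
PCM_FRAME_BYTES = PCM_SAMPLE_WIDTH * PCM_CHANNELS  # 2 bytes per frame

pcm_len = 1_440_000  # bytes of s16le output (illustrative value)
frame_count, remainder = divmod(pcm_len, PCM_FRAME_BYTES)
duration = frame_count / PCM_SAMPLE_RATE
print(duration)  # 30.0 seconds, under the 40 s WARNING_DURATION_SECONDS threshold
```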
@@ -3,7 +3,7 @@
"name": "SmartThings",
"codeowners": ["@joostlek"],
"config_flow": true,
"dependencies": ["application_credentials", "http", "ffmpeg"],
"dependencies": ["application_credentials"],
"dhcp": [
{
"hostname": "st*",
@@ -6,22 +6,17 @@ from typing import Any
|
||||
|
||||
from pysmartthings import Attribute, Capability, Category, Command, SmartThings
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
MediaPlayerDeviceClass,
|
||||
MediaPlayerEntity,
|
||||
MediaPlayerEntityFeature,
|
||||
MediaPlayerState,
|
||||
MediaType,
|
||||
RepeatMode,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import FullDevice, SmartThingsConfigEntry
|
||||
from .audio import SmartThingsAudioError, async_get_audio_manager
|
||||
from .const import MAIN
|
||||
from .entity import SmartThingsEntity
|
||||
|
||||
@@ -89,7 +84,6 @@ class SmartThingsMediaPlayer(SmartThingsEntity, MediaPlayerEntity):
|
||||
Capability.AUDIO_MUTE,
|
||||
Capability.AUDIO_TRACK_DATA,
|
||||
Capability.AUDIO_VOLUME,
|
||||
Capability.AUDIO_NOTIFICATION,
|
||||
Capability.MEDIA_INPUT_SOURCE,
|
||||
Capability.MEDIA_PLAYBACK,
|
||||
Capability.MEDIA_PLAYBACK_REPEAT,
|
||||
@@ -134,8 +128,6 @@ class SmartThingsMediaPlayer(SmartThingsEntity, MediaPlayerEntity):
|
||||
flags |= MediaPlayerEntityFeature.SHUFFLE_SET
|
||||
if self.supports_capability(Capability.MEDIA_PLAYBACK_REPEAT):
|
||||
flags |= MediaPlayerEntityFeature.REPEAT_SET
|
||||
if self.supports_capability(Capability.AUDIO_NOTIFICATION):
|
||||
flags |= MediaPlayerEntityFeature.PLAY_MEDIA
|
||||
return flags
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
@@ -241,40 +233,6 @@ class SmartThingsMediaPlayer(SmartThingsEntity, MediaPlayerEntity):
|
||||
argument=HA_REPEAT_MODE_TO_SMARTTHINGS[repeat],
|
||||
)
|
||||
|
||||
async def async_play_media(
|
||||
self, media_type: MediaType | str, media_id: str, **kwargs: Any
|
||||
) -> None:
|
||||
"""Play media using SmartThings audio notifications."""
|
||||
if not self.supports_capability(Capability.AUDIO_NOTIFICATION):
|
||||
raise HomeAssistantError("Device does not support audio notifications")
|
||||
|
||||
if media_type not in (MediaType.MUSIC,):
|
||||
raise HomeAssistantError(
|
||||
"Unsupported media type for SmartThings audio notification"
|
||||
)
|
||||
|
||||
if media_source.is_media_source_id(media_id):
|
||||
play_item = await media_source.async_resolve_media(
|
||||
self.hass, media_id, self.entity_id
|
||||
)
|
||||
media_id = async_process_play_media_url(self.hass, play_item.url)
|
||||
else:
|
||||
media_id = async_process_play_media_url(self.hass, media_id)
|
||||
|
||||
audio_manager = await async_get_audio_manager(self.hass)
|
||||
try:
|
||||
proxy_url = await audio_manager.async_prepare_notification(media_id)
|
||||
except SmartThingsAudioError as err:
|
||||
raise HomeAssistantError(str(err)) from err
|
||||
|
||||
command = Command("playTrackAndResume")
|
||||
|
||||
await self.execute_device_command(
|
||||
Capability.AUDIO_NOTIFICATION,
|
||||
command,
|
||||
argument=[proxy_url],
|
||||
)
|
||||
|
||||
@property
|
||||
def media_title(self) -> str | None:
|
||||
"""Title of current playing media."""
|
||||
|
||||
@@ -840,19 +840,26 @@ class NodeEvents:
|
||||
# After ensuring the node is set up in HA, we should check if the node's
|
||||
# device config has changed, and if so, issue a repair registry entry for a
|
||||
# possible reinterview
|
||||
if not node.is_controller_node and await node.async_has_device_config_changed():
|
||||
device_name = device.name_by_user or device.name or "Unnamed device"
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"device_config_file_changed.{device.id}",
|
||||
data={"device_id": device.id, "device_name": device_name},
|
||||
is_fixable=True,
|
||||
is_persistent=False,
|
||||
translation_key="device_config_file_changed",
|
||||
translation_placeholders={"device_name": device_name},
|
||||
severity=IssueSeverity.WARNING,
|
||||
)
|
||||
if not node.is_controller_node:
|
||||
issue_id = f"device_config_file_changed.{device.id}"
|
||||
if await node.async_has_device_config_changed():
|
||||
device_name = device.name_by_user or device.name or "Unnamed device"
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
data={"device_id": device.id, "device_name": device_name},
|
||||
is_fixable=True,
|
||||
is_persistent=False,
|
||||
translation_key="device_config_file_changed",
|
||||
translation_placeholders={"device_name": device_name},
|
||||
severity=IssueSeverity.WARNING,
|
||||
)
|
||||
else:
|
||||
# Clear any existing repair issue if the device config is not considered
|
||||
# changed. This can happen when the original issue was created by
|
||||
# an upstream bug, or the change has been reverted.
|
||||
async_delete_issue(self.hass, DOMAIN, issue_id)
|
||||
|
||||
async def async_handle_discovery_info(
|
||||
self,
|
||||
|
||||
@@ -39,7 +39,7 @@ habluetooth==5.8.0
hass-nabucasa==1.9.0
hassil==3.5.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20260107.1
home-assistant-frontend==20260107.2
home-assistant-intents==2026.1.6
httpx==0.28.1
ifaddr==0.2.0
requirements_all.txt (generated, 7 changed lines)
@@ -319,7 +319,7 @@ aiolookin==1.0.0
aiolyric==2.0.2

# homeassistant.components.mealie
aiomealie==1.1.1
aiomealie==1.2.0

# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -1215,7 +1215,7 @@ hole==0.9.0
holidays==0.84

# homeassistant.components.frontend
home-assistant-frontend==20260107.1
home-assistant-frontend==20260107.2

# homeassistant.components.conversation
home-assistant-intents==2026.1.6
@@ -1351,7 +1351,7 @@ kiwiki-client==0.1.1
knocki==0.4.2

# homeassistant.components.knx
knx-frontend==2025.12.30.151231
knx-frontend==2026.1.15.112308

# homeassistant.components.konnected
konnected==1.2.0
@@ -1776,7 +1776,6 @@ prowlpy==1.1.1
# homeassistant.components.proxmoxve
proxmoxer==2.0.1

# homeassistant.components.cloud
# homeassistant.components.hardware
# homeassistant.components.recorder
# homeassistant.components.systemmonitor

requirements_test_all.txt
@@ -304,7 +304,7 @@ aiolookin==1.0.0
aiolyric==2.0.2

# homeassistant.components.mealie
aiomealie==1.1.1
aiomealie==1.2.0

# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -1073,7 +1073,7 @@ hole==0.9.0
holidays==0.84

# homeassistant.components.frontend
home-assistant-frontend==20260107.1
home-assistant-frontend==20260107.2

# homeassistant.components.conversation
home-assistant-intents==2026.1.6
@@ -1185,7 +1185,7 @@ kegtron-ble==1.0.2
knocki==0.4.2

# homeassistant.components.knx
knx-frontend==2025.12.30.151231
knx-frontend==2026.1.15.112308

# homeassistant.components.konnected
konnected==1.2.0
@@ -1522,7 +1522,6 @@ prometheus-client==0.21.0
# homeassistant.components.prowl
prowlpy==1.1.1

# homeassistant.components.cloud
# homeassistant.components.hardware
# homeassistant.components.recorder
# homeassistant.components.systemmonitor

@@ -87,18 +87,6 @@
|
||||
|
||||
</details>
|
||||
|
||||
## Host resource usage
|
||||
|
||||
Resource | Value
|
||||
--- | ---
|
||||
CPU usage | 25.5%
|
||||
Memory total | 16.0 GB
|
||||
Memory used | 8.0 GB (50.0%)
|
||||
Memory available | 8.0 GB
|
||||
Disk total | 500.0 GB
|
||||
Disk used | 200.0 GB (40.0%)
|
||||
Disk free | 300.0 GB
|
||||
|
||||
## Full logs
|
||||
|
||||
<details><summary>Logs</summary>
|
||||
@@ -193,18 +181,6 @@
|
||||
|
||||
</details>
|
||||
|
||||
## Host resource usage
|
||||
|
||||
Resource | Value
|
||||
--- | ---
|
||||
CPU usage | 25.5%
|
||||
Memory total | 16.0 GB
|
||||
Memory used | 8.0 GB (50.0%)
|
||||
Memory available | 8.0 GB
|
||||
Disk total | 500.0 GB
|
||||
Disk used | 200.0 GB (40.0%)
|
||||
Disk free | 300.0 GB
|
||||
|
||||
## Full logs
|
||||
|
||||
<details><summary>Logs</summary>
|
||||
@@ -220,252 +196,6 @@
|
||||
|
||||
'''
|
||||
# ---
|
||||
# name: test_download_support_package_hassio
|
||||
'''
|
||||
## System Information
|
||||
|
||||
version | core-2025.2.0
|
||||
--- | ---
|
||||
installation_type | Home Assistant OS
|
||||
dev | False
|
||||
hassio | True
|
||||
docker | True
|
||||
container_arch | aarch64
|
||||
user | root
|
||||
virtualenv | False
|
||||
python_version | 3.13.1
|
||||
os_name | Linux
|
||||
os_version | 6.12.9
|
||||
arch | aarch64
|
||||
timezone | US/Pacific
|
||||
config_dir | config
|
||||
|
||||
## Active Integrations
|
||||
|
||||
Built-in integrations: 23
|
||||
Custom integrations: 1
|
||||
|
||||
<details><summary>Built-in integrations</summary>
|
||||
|
||||
Domain | Name
|
||||
--- | ---
|
||||
ai_task | AI Task
|
||||
auth | Auth
|
||||
binary_sensor | Binary Sensor
|
||||
cloud | Home Assistant Cloud
|
||||
cloud.ai_task | Unknown
|
||||
cloud.binary_sensor | Unknown
|
||||
cloud.conversation | Unknown
|
||||
cloud.stt | Unknown
|
||||
cloud.tts | Unknown
|
||||
conversation | Conversation
|
||||
ffmpeg | FFmpeg
|
||||
hassio | hassio
|
||||
homeassistant | Home Assistant Core Integration
|
||||
http | HTTP
|
||||
intent | Intent
|
||||
media_source | Media Source
|
||||
mock_no_info_integration | mock_no_info_integration
|
||||
repairs | Repairs
|
||||
stt | Speech-to-text (STT)
|
||||
system_health | System Health
|
||||
tts | Text-to-speech (TTS)
|
||||
web_rtc | WebRTC
|
||||
webhook | Webhook
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary>Custom integrations</summary>
|
||||
|
||||
Domain | Name | Version | Documentation
|
||||
--- | --- | --- | ---
|
||||
test | Test Components | 1.2.3 | http://example.com
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary>hassio</summary>
|
||||
|
||||
host_os | Home Assistant OS 14.0
|
||||
--- | ---
|
||||
update_channel | stable
|
||||
supervisor_version | supervisor-2025.01.0
|
||||
agent_version | 1.6.0
|
||||
docker_version | 27.4.1
|
||||
disk_total | 128.5 GB
|
||||
disk_used | 45.2 GB
|
||||
healthy | True
|
||||
supported | True
|
||||
host_connectivity | True
|
||||
supervisor_connectivity | True
|
||||
board | green
|
||||
supervisor_api | ok
|
||||
version_api | ok
|
||||
installed_addons | Mosquitto broker (6.4.1), Samba share (12.3.2), Visual Studio Code (5.21.2)
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary>mock_no_info_integration</summary>
|
||||
|
||||
No information available
|
||||
</details>
|
||||
|
||||
<details><summary>cloud</summary>
|
||||
|
||||
logged_in | True
|
||||
--- | ---
|
||||
subscription_expiration | 2025-01-17T11:19:31+00:00
|
||||
relayer_connected | True
|
||||
relayer_region | xx-earth-616
|
||||
remote_enabled | True
|
||||
remote_connected | False
|
||||
alexa_enabled | True
|
||||
google_enabled | False
|
||||
cloud_ice_servers_enabled | True
|
||||
remote_server | us-west-1
|
||||
certificate_status | ready
|
||||
instance_id | 12345678901234567890
|
||||
can_reach_cert_server | Exception: Unexpected exception
|
||||
can_reach_cloud_auth | Failed: unreachable
|
||||
can_reach_cloud | ok
|
||||
|
||||
</details>
|
||||
|
||||
## Host resource usage
|
||||
|
||||
Resource | Value
|
||||
--- | ---
|
||||
CPU usage | 25.5%
|
||||
Memory total | 16.0 GB
|
||||
Memory used | 8.0 GB (50.0%)
|
||||
Memory available | 8.0 GB
|
||||
Disk total | 500.0 GB
|
||||
Disk used | 200.0 GB (40.0%)
|
||||
Disk free | 300.0 GB
|
||||
|
||||
## Add-on resource usage
|
||||
|
||||
<details><summary>Add-on resources</summary>
|
||||
|
||||
Add-on | Version | State | CPU | Memory
|
||||
--- | --- | --- | --- | ---
|
||||
Mosquitto broker | 6.4.1 | started | 0.5% | 1.2%
|
||||
Samba share | 12.3.2 | started | 0.1% | 0.8%
|
||||
Visual Studio Code | 5.21.2 | stopped | N/A | N/A
|
||||
|
||||
</details>
|
||||
|
||||
## Full logs
|
||||
|
||||
<details><summary>Logs</summary>
|
||||
|
||||
```logs
|
||||
2025-02-10 12:00:00.000 INFO (MainThread) [hass_nabucasa.iot] Hass nabucasa log
|
||||
2025-02-10 12:00:00.000 WARNING (MainThread) [snitun.utils.aiohttp_client] Snitun log
|
||||
2025-02-10 12:00:00.000 ERROR (MainThread) [homeassistant.components.cloud.client] Cloud log
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
'''
|
||||
# ---
|
||||
# name: test_download_support_package_host_resources
|
||||
'''
|
||||
## System Information
|
||||
|
||||
version | core-2025.2.0
|
||||
--- | ---
|
||||
installation_type | Home Assistant Container
|
||||
dev | False
|
||||
hassio | False
|
||||
docker | True
|
||||
container_arch | x86_64
|
||||
user | root
|
||||
virtualenv | False
|
||||
python_version | 3.13.1
|
||||
os_name | Linux
|
||||
os_version | 6.12.9
|
||||
arch | x86_64
|
||||
timezone | US/Pacific
|
||||
config_dir | config
|
||||
|
||||
## Active Integrations
|
||||
|
||||
Built-in integrations: 21
|
||||
Custom integrations: 0
|
||||
|
||||
<details><summary>Built-in integrations</summary>
|
||||
|
||||
Domain | Name
|
||||
--- | ---
|
||||
ai_task | AI Task
|
||||
auth | Auth
|
||||
binary_sensor | Binary Sensor
|
||||
cloud | Home Assistant Cloud
|
||||
cloud.ai_task | Unknown
|
||||
cloud.binary_sensor | Unknown
|
||||
cloud.conversation | Unknown
|
||||
cloud.stt | Unknown
|
||||
cloud.tts | Unknown
|
||||
conversation | Conversation
|
||||
ffmpeg | FFmpeg
|
||||
homeassistant | Home Assistant Core Integration
|
||||
http | HTTP
|
||||
intent | Intent
|
||||
media_source | Media Source
|
||||
repairs | Repairs
|
||||
stt | Speech-to-text (STT)
|
||||
system_health | System Health
|
||||
tts | Text-to-speech (TTS)
|
||||
web_rtc | WebRTC
|
||||
webhook | Webhook
|
||||
|
||||
</details>
|
||||
|
||||
<details><summary>cloud</summary>
|
||||
|
||||
logged_in | True
|
||||
--- | ---
|
||||
subscription_expiration | 2025-01-17T11:19:31+00:00
|
||||
relayer_connected | True
|
||||
relayer_region | xx-earth-616
|
||||
remote_enabled | True
|
||||
remote_connected | False
|
||||
alexa_enabled | True
|
||||
google_enabled | False
|
||||
cloud_ice_servers_enabled | True
|
||||
remote_server | us-west-1
|
||||
certificate_status | ready
|
||||
instance_id | 12345678901234567890
|
||||
can_reach_cert_server | Exception: Unexpected exception
|
||||
can_reach_cloud_auth | Failed: unreachable
|
||||
can_reach_cloud | ok
|
||||
|
||||
</details>
|
||||
|
||||
## Host resource usage
|
||||
|
||||
Resource | Value
|
||||
--- | ---
|
||||
CPU usage | 25.5%
|
||||
Memory total | 16.0 GB
|
||||
Memory used | 8.0 GB (50.0%)
|
||||
Memory available | 8.0 GB
|
||||
Disk total | 500.0 GB
|
||||
Disk used | 200.0 GB (40.0%)
|
||||
Disk free | 300.0 GB
|
||||
|
||||
## Full logs
|
||||
|
||||
<details><summary>Logs</summary>
|
||||
|
||||
```logs
|
||||
2025-02-10 12:00:00.000 INFO (MainThread) [hass_nabucasa.iot] Hass nabucasa log
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
'''
|
||||
# ---
|
||||
# name: test_download_support_package_integration_load_error
|
||||
'''
|
||||
## System Information
|
||||
@@ -516,18 +246,6 @@
|
||||
|
||||
</details>
|
||||
|
||||
## Host resource usage
|
||||
|
||||
Resource | Value
|
||||
--- | ---
|
||||
CPU usage | 25.5%
|
||||
Memory total | 16.0 GB
|
||||
Memory used | 8.0 GB (50.0%)
|
||||
Memory available | 8.0 GB
|
||||
Disk total | 500.0 GB
|
||||
Disk used | 200.0 GB (40.0%)
|
||||
Disk free | 300.0 GB
|
||||
|
||||
## Full logs
|
||||
|
||||
<details><summary>Logs</summary>
|
||||
|
||||
@@ -1,6 +1,6 @@
"""Tests for the HTTP API for the cloud component."""

from collections.abc import Callable, Coroutine, Generator
from collections.abc import Callable, Coroutine
from copy import deepcopy
import datetime
from http import HTTPStatus
@@ -114,36 +114,6 @@ PIPELINE_DATA_OTHER = {
SUBSCRIPTION_INFO_URL = "https://api-test.hass.io/payments/subscription_info"


@pytest.fixture
def mock_psutil_wrapper() -> Generator[MagicMock]:
    """Fixture to mock psutil for support package tests."""
    mock_memory = MagicMock()
    mock_memory.total = 16 * 1024**3  # 16 GB
    mock_memory.used = 8 * 1024**3  # 8 GB
    mock_memory.available = 8 * 1024**3  # 8 GB
    mock_memory.percent = 50.0

    mock_disk = MagicMock()
    mock_disk.total = 500 * 1024**3  # 500 GB
    mock_disk.used = 200 * 1024**3  # 200 GB
    mock_disk.free = 300 * 1024**3  # 300 GB
    mock_disk.percent = 40.0

    mock_psutil = MagicMock()
    mock_psutil.cpu_percent = MagicMock(return_value=25.5)
    mock_psutil.virtual_memory = MagicMock(return_value=mock_memory)
    mock_psutil.disk_usage = MagicMock(return_value=mock_disk)

    mock_wrapper = MagicMock()
    mock_wrapper.psutil = mock_psutil

    with patch(
        "homeassistant.components.cloud.http_api.ha_psutil.PsutilWrapper",
        return_value=mock_wrapper,
    ):
        yield mock_wrapper


@pytest.fixture(name="setup_cloud")
async def setup_cloud_fixture(hass: HomeAssistant, cloud: MagicMock) -> None:
    """Fixture that sets up cloud."""
@@ -1876,7 +1846,7 @@ async def test_logout_view_dispatch_event(


@patch("homeassistant.components.cloud.helpers.FixedSizeQueueLogHandler.MAX_RECORDS", 3)
@pytest.mark.usefixtures("enable_custom_integrations", "mock_psutil_wrapper")
@pytest.mark.usefixtures("enable_custom_integrations")
async def test_download_support_package(
|
||||
hass: HomeAssistant,
|
||||
cloud: MagicMock,
|
||||
@@ -1989,7 +1959,7 @@ async def test_download_support_package(
|
||||
assert await req.text() == snapshot
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_custom_integrations", "mock_psutil_wrapper")
|
||||
@pytest.mark.usefixtures("enable_custom_integrations")
|
||||
async def test_download_support_package_custom_components_error(
|
||||
hass: HomeAssistant,
|
||||
cloud: MagicMock,
|
||||
@@ -2016,7 +1986,7 @@ async def test_download_support_package_custom_components_error(
|
||||
async def mock_empty_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
return {}
|
||||
|
||||
register.async_register_info(mock_empty_info, "/mock_integration")
|
||||
register.async_register_info(mock_empty_info, "/config/mock_integration")
|
||||
|
||||
mock_platform(
|
||||
hass,
|
||||
@@ -2101,7 +2071,7 @@ async def test_download_support_package_custom_components_error(
|
||||
assert await req.text() == snapshot
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("enable_custom_integrations", "mock_psutil_wrapper")
|
||||
@pytest.mark.usefixtures("enable_custom_integrations")
|
||||
async def test_download_support_package_integration_load_error(
|
||||
hass: HomeAssistant,
|
||||
cloud: MagicMock,
|
||||
@@ -2128,7 +2098,7 @@ async def test_download_support_package_integration_load_error(
|
||||
async def mock_empty_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
return {}
|
||||
|
||||
register.async_register_info(mock_empty_info, "/mock_integration")
|
||||
register.async_register_info(mock_empty_info, "/config/mock_integration")
|
||||
|
||||
mock_platform(
|
||||
hass,
|
||||
@@ -2218,277 +2188,6 @@ async def test_download_support_package_integration_load_error(
|
||||
assert await req.text() == snapshot
|
||||
|
||||
|
||||
@patch("homeassistant.components.cloud.helpers.FixedSizeQueueLogHandler.MAX_RECORDS", 3)
|
||||
@pytest.mark.usefixtures("enable_custom_integrations", "mock_psutil_wrapper")
|
||||
async def test_download_support_package_hassio(
|
||||
hass: HomeAssistant,
|
||||
cloud: MagicMock,
|
||||
set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]],
|
||||
hass_client: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test downloading a support package file with hassio resources."""
|
||||
|
||||
aioclient_mock.get("https://cloud.bla.com/status", text="")
|
||||
aioclient_mock.get(
|
||||
"https://cert-server/directory", exc=Exception("Unexpected exception")
|
||||
)
|
||||
aioclient_mock.get(
|
||||
"https://cognito-idp.us-east-1.amazonaws.com/AAAA/.well-known/jwks.json",
|
||||
exc=aiohttp.ClientError,
|
||||
)
|
||||
|
||||
def async_register_hassio_platform(
|
||||
hass: HomeAssistant,
|
||||
register: system_health.SystemHealthRegistration,
|
||||
) -> None:
|
||||
async def mock_hassio_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
return {
|
||||
"host_os": "Home Assistant OS 14.0",
|
||||
"update_channel": "stable",
|
||||
"supervisor_version": "supervisor-2025.01.0",
|
||||
"agent_version": "1.6.0",
|
||||
"docker_version": "27.4.1",
|
||||
"disk_total": "128.5 GB",
|
||||
"disk_used": "45.2 GB",
|
||||
"healthy": True,
|
||||
"supported": True,
|
||||
"host_connectivity": True,
|
||||
"supervisor_connectivity": True,
|
||||
"board": "green",
|
||||
"supervisor_api": "ok",
|
||||
"version_api": "ok",
|
||||
"installed_addons": "Mosquitto broker (6.4.1), Samba share (12.3.2), Visual Studio Code (5.21.2)",
|
||||
}
|
||||
|
||||
register.async_register_info(mock_hassio_info, "/hassio/system")
|
||||
|
||||
mock_platform(
|
||||
hass,
|
||||
"hassio.system_health",
|
||||
MagicMock(async_register=async_register_hassio_platform),
|
||||
)
|
||||
hass.config.components.add("hassio")
|
||||
|
||||
def async_register_mock_platform(
|
||||
hass: HomeAssistant,
|
||||
register: system_health.SystemHealthRegistration,
|
||||
) -> None:
|
||||
async def mock_empty_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
return {}
|
||||
|
||||
register.async_register_info(mock_empty_info, "/config/mock_integration")
|
||||
|
||||
mock_platform(
|
||||
hass,
|
||||
"mock_no_info_integration.system_health",
|
||||
MagicMock(async_register=async_register_mock_platform),
|
||||
)
|
||||
hass.config.components.add("mock_no_info_integration")
|
||||
hass.config.components.add("test")
|
||||
|
||||
assert await async_setup_component(hass, "system_health", {})
|
||||
|
||||
with patch("uuid.UUID.hex", new_callable=PropertyMock) as hexmock:
|
||||
hexmock.return_value = "12345678901234567890"
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
DOMAIN,
|
||||
{
|
||||
DOMAIN: {
|
||||
"user_pool_id": "AAAA",
|
||||
"region": "us-east-1",
|
||||
"acme_server": "cert-server",
|
||||
"relayer_server": "cloud.bla.com",
|
||||
},
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await cloud.login("test-user", "test-pass")
|
||||
|
||||
cloud.remote.snitun_server = "us-west-1"
|
||||
cloud.remote.certificate_status = CertificateStatus.READY
|
||||
cloud.expiration_date = dt_util.parse_datetime("2025-01-17T11:19:31.0+00:00")
|
||||
|
||||
await cloud.client.async_system_message({"region": "xx-earth-616"})
|
||||
await set_cloud_prefs(
|
||||
{
|
||||
"alexa_enabled": True,
|
||||
"google_enabled": False,
|
||||
"remote_enabled": True,
|
||||
"cloud_ice_servers_enabled": True,
|
||||
}
|
||||
)
|
||||
|
||||
now = dt_util.utcnow()
|
||||
tz = now.astimezone().tzinfo
|
||||
freezer.move_to(datetime.datetime(2025, 2, 10, 12, 0, 0, tzinfo=tz))
|
||||
logging.getLogger("hass_nabucasa.iot").info(
|
||||
"This message will be dropped since this test patches MAX_RECORDS"
|
||||
)
|
||||
logging.getLogger("hass_nabucasa.iot").info("Hass nabucasa log")
|
||||
logging.getLogger("snitun.utils.aiohttp_client").warning("Snitun log")
|
||||
logging.getLogger("homeassistant.components.cloud.client").error("Cloud log")
|
||||
freezer.move_to(now)
|
||||
|
||||
cloud_client = await hass_client()
|
||||
|
||||
with (
|
||||
patch.object(hass.config, "config_dir", new="config"),
|
||||
patch(
|
||||
"homeassistant.components.homeassistant.system_health.system_info.async_get_system_info",
|
||||
return_value={
|
||||
"installation_type": "Home Assistant OS",
|
||||
"version": "2025.2.0",
|
||||
"dev": False,
|
||||
"hassio": True,
|
||||
"virtualenv": False,
|
||||
"python_version": "3.13.1",
|
||||
"docker": True,
|
||||
"container_arch": "aarch64",
|
||||
"arch": "aarch64",
|
||||
"timezone": "US/Pacific",
|
||||
"os_name": "Linux",
|
||||
"os_version": "6.12.9",
|
||||
"user": "root",
|
||||
},
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.cloud.http_api.get_supervisor_info",
|
||||
return_value={
|
||||
"addons": [
|
||||
{
|
||||
"slug": "core_mosquitto",
|
||||
"name": "Mosquitto broker",
|
||||
"version": "6.4.1",
|
||||
"state": "started",
|
||||
},
|
||||
{
|
||||
"slug": "core_samba",
|
||||
"name": "Samba share",
|
||||
"version": "12.3.2",
|
||||
"state": "started",
|
||||
},
|
||||
{
|
||||
"slug": "a0d7b954_vscode",
|
||||
"name": "Visual Studio Code",
|
||||
"version": "5.21.2",
|
||||
"state": "stopped",
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.cloud.http_api.get_addons_stats",
|
||||
return_value={
|
||||
"core_mosquitto": {
|
||||
"cpu_percent": 0.5,
|
||||
"memory_percent": 1.2,
|
||||
},
|
||||
"core_samba": {
|
||||
"cpu_percent": 0.1,
|
||||
"memory_percent": 0.8,
|
||||
},
|
||||
# No stats for vscode (stopped)
|
||||
},
|
||||
),
|
||||
):
|
||||
req = await cloud_client.get("/api/cloud/support_package")
|
||||
assert req.status == HTTPStatus.OK
|
||||
assert await req.text() == snapshot
|
||||
|
||||
|
||||
@patch("homeassistant.components.cloud.helpers.FixedSizeQueueLogHandler.MAX_RECORDS", 3)
|
||||
@pytest.mark.usefixtures("mock_psutil_wrapper")
|
||||
async def test_download_support_package_host_resources(
|
||||
hass: HomeAssistant,
|
||||
cloud: MagicMock,
|
||||
set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]],
|
||||
hass_client: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test downloading a support package file with psutil host resources (non-hassio)."""
|
||||
aioclient_mock.get("https://cloud.bla.com/status", text="")
|
||||
aioclient_mock.get(
|
||||
"https://cert-server/directory", exc=Exception("Unexpected exception")
|
||||
)
|
||||
aioclient_mock.get(
|
||||
"https://cognito-idp.us-east-1.amazonaws.com/AAAA/.well-known/jwks.json",
|
||||
exc=aiohttp.ClientError,
|
||||
)
|
||||
|
||||
assert await async_setup_component(hass, "system_health", {})
|
||||
|
||||
with patch("uuid.UUID.hex", new_callable=PropertyMock) as hexmock:
|
||||
hexmock.return_value = "12345678901234567890"
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
DOMAIN,
|
||||
{
|
||||
DOMAIN: {
|
||||
"user_pool_id": "AAAA",
|
||||
"region": "us-east-1",
|
||||
"acme_server": "cert-server",
|
||||
"relayer_server": "cloud.bla.com",
|
||||
},
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await cloud.login("test-user", "test-pass")
|
||||
|
||||
cloud.remote.snitun_server = "us-west-1"
|
||||
cloud.remote.certificate_status = CertificateStatus.READY
|
||||
cloud.expiration_date = dt_util.parse_datetime("2025-01-17T11:19:31.0+00:00")
|
||||
|
||||
await cloud.client.async_system_message({"region": "xx-earth-616"})
|
||||
await set_cloud_prefs(
|
||||
{
|
||||
"alexa_enabled": True,
|
||||
"google_enabled": False,
|
||||
"remote_enabled": True,
|
||||
"cloud_ice_servers_enabled": True,
|
||||
}
|
||||
)
|
||||
|
||||
now = dt_util.utcnow()
|
||||
tz = now.astimezone().tzinfo
|
||||
freezer.move_to(datetime.datetime(2025, 2, 10, 12, 0, 0, tzinfo=tz))
|
||||
logging.getLogger("hass_nabucasa.iot").info("Hass nabucasa log")
|
||||
freezer.move_to(now)
|
||||
|
||||
cloud_client = await hass_client()
|
||||
with (
|
||||
patch.object(hass.config, "config_dir", new="config"),
|
||||
patch(
|
||||
"homeassistant.components.homeassistant.system_health.system_info.async_get_system_info",
|
||||
return_value={
|
||||
"installation_type": "Home Assistant Container",
|
||||
"version": "2025.2.0",
|
||||
"dev": False,
|
||||
"hassio": False,
|
||||
"virtualenv": False,
|
||||
"python_version": "3.13.1",
|
||||
"docker": True,
|
||||
"container_arch": "x86_64",
|
||||
"arch": "x86_64",
|
||||
"timezone": "US/Pacific",
|
||||
"os_name": "Linux",
|
||||
"os_version": "6.12.9",
|
||||
"user": "root",
|
||||
},
|
||||
),
|
||||
):
|
||||
req = await cloud_client.get("/api/cloud/support_package")
|
||||
assert req.status == HTTPStatus.OK
|
||||
assert await req.text() == snapshot
|
||||
|
||||
|
||||
async def test_websocket_ice_servers(
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Tests for analytics platform."""
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.analytics import async_devices_payload
|
||||
from homeassistant.components.esphome import DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -11,7 +9,6 @@ from homeassistant.setup import async_setup_component
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics(
|
||||
hass: HomeAssistant, device_registry: dr.DeviceRegistry
|
||||
) -> None:
|
||||
|
||||
@@ -52,7 +52,6 @@ async def test_async_setup_entry_errors(
|
||||
assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_async_setup_entry_success(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MagicMock,
|
||||
@@ -67,7 +66,6 @@ async def test_async_setup_entry_success(
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_async_unload_entry(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MagicMock,
|
||||
@@ -87,7 +85,6 @@ async def test_async_unload_entry(
|
||||
assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_platforms_forwarded(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MagicMock,
|
||||
|
||||
@@ -38,7 +38,6 @@ async def test_sensors(
|
||||
ValueError,
|
||||
],
|
||||
)
|
||||
@pytest.mark.asyncio
|
||||
async def test_sensor_unavailable_on_update_error(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
|
||||
@@ -11,6 +11,7 @@ from homeassistant.components.labs import (
|
||||
EVENT_LABS_UPDATED,
|
||||
async_is_preview_feature_enabled,
|
||||
async_listen,
|
||||
async_update_preview_feature,
|
||||
)
|
||||
from homeassistant.components.labs.const import DOMAIN, LABS_DATA
|
||||
from homeassistant.components.labs.models import LabPreviewFeature
|
||||
@@ -20,6 +21,8 @@ from homeassistant.setup import async_setup_component
|
||||
|
||||
from . import assert_stored_labs_data
|
||||
|
||||
from tests.common import async_capture_events
|
||||
|
||||
|
||||
async def test_async_setup(hass: HomeAssistant) -> None:
|
||||
"""Test the Labs integration setup."""
|
||||
@@ -436,3 +439,57 @@ async def test_async_listen_helper(hass: HomeAssistant) -> None:
|
||||
|
||||
# Verify listener was not called after unsubscribe
|
||||
assert len(listener_calls) == 1
|
||||
|
||||
|
||||
async def test_async_update_preview_feature(
|
||||
hass: HomeAssistant, hass_storage: dict[str, Any]
|
||||
) -> None:
|
||||
"""Test enabling and disabling a preview feature using the helper function."""
|
||||
hass.config.components.add("kitchen_sink")
|
||||
|
||||
assert await async_setup_component(hass, DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
events = async_capture_events(hass, EVENT_LABS_UPDATED)
|
||||
|
||||
await async_update_preview_feature(
|
||||
hass, "kitchen_sink", "special_repair", enabled=True
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert async_is_preview_feature_enabled(hass, "kitchen_sink", "special_repair")
|
||||
|
||||
assert len(events) == 1
|
||||
assert events[0].data["domain"] == "kitchen_sink"
|
||||
assert events[0].data["preview_feature"] == "special_repair"
|
||||
assert events[0].data["enabled"] is True
|
||||
|
||||
assert_stored_labs_data(
|
||||
hass_storage,
|
||||
[{"domain": "kitchen_sink", "preview_feature": "special_repair"}],
|
||||
)
|
||||
|
||||
await async_update_preview_feature(
|
||||
hass, "kitchen_sink", "special_repair", enabled=False
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert not async_is_preview_feature_enabled(hass, "kitchen_sink", "special_repair")
|
||||
|
||||
assert len(events) == 2
|
||||
assert events[1].data["domain"] == "kitchen_sink"
|
||||
assert events[1].data["preview_feature"] == "special_repair"
|
||||
assert events[1].data["enabled"] is False
|
||||
|
||||
assert_stored_labs_data(hass_storage, [])
|
||||
|
||||
|
||||
async def test_async_update_preview_feature_not_found(hass: HomeAssistant) -> None:
|
||||
"""Test updating a preview feature that doesn't exist raises."""
|
||||
assert await async_setup_component(hass, DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
with pytest.raises(
|
||||
ValueError, match="Preview feature nonexistent.feature not found"
|
||||
):
|
||||
await async_update_preview_feature(hass, "nonexistent", "feature", enabled=True)
|
||||
|
||||
tests/components/mastodon/snapshots/test_binary_sensor.ambr
@@ -0,0 +1,393 @@
|
||||
# serializer version: 1
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_bot-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_bot',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Bot',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Bot',
|
||||
'platform': 'mastodon',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <MastodonBinarySensor.BOT: 'bot'>,
|
||||
'unique_id': 'trwnh_mastodon_social_bot',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_bot-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Mastodon @trwnh@mastodon.social Bot',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_bot',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_discoverable-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_discoverable',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Discoverable',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Discoverable',
|
||||
'platform': 'mastodon',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <MastodonBinarySensor.DISCOVERABLE: 'discoverable'>,
|
||||
'unique_id': 'trwnh_mastodon_social_discoverable',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_discoverable-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Mastodon @trwnh@mastodon.social Discoverable',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_discoverable',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_indexable-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_indexable',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Indexable',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Indexable',
|
||||
'platform': 'mastodon',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <MastodonBinarySensor.INDEXABLE: 'indexable'>,
|
||||
'unique_id': 'trwnh_mastodon_social_indexable',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_indexable-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Mastodon @trwnh@mastodon.social Indexable',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_indexable',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_limited-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_limited',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Limited',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Limited',
|
||||
'platform': 'mastodon',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <MastodonBinarySensor.LIMITED: 'limited'>,
|
||||
'unique_id': 'trwnh_mastodon_social_limited',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_limited-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Mastodon @trwnh@mastodon.social Limited',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_limited',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_locked-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_locked',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Locked',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Locked',
|
||||
'platform': 'mastodon',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <MastodonBinarySensor.LOCKED: 'locked'>,
|
||||
'unique_id': 'trwnh_mastodon_social_locked',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_locked-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Mastodon @trwnh@mastodon.social Locked',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_locked',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_memorial-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_memorial',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Memorial',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Memorial',
|
||||
'platform': 'mastodon',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <MastodonBinarySensor.MEMORIAL: 'memorial'>,
|
||||
'unique_id': 'trwnh_mastodon_social_memorial',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_memorial-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Mastodon @trwnh@mastodon.social Memorial',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_memorial',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_moved-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_moved',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Moved',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Moved',
|
||||
'platform': 'mastodon',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <MastodonBinarySensor.MOVED: 'moved'>,
|
||||
'unique_id': 'trwnh_mastodon_social_moved',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_moved-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Mastodon @trwnh@mastodon.social Moved',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_moved',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_suspended-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_suspended',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Suspended',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Suspended',
|
||||
'platform': 'mastodon',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <MastodonBinarySensor.SUSPENDED: 'suspended'>,
|
||||
'unique_id': 'trwnh_mastodon_social_suspended',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[binary_sensor.mastodon_trwnh_mastodon_social_suspended-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Mastodon @trwnh@mastodon.social Suspended',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mastodon_trwnh_mastodon_social_suspended',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
tests/components/mastodon/test_binary_sensor.py
@@ -0,0 +1,28 @@
|
||||
"""Tests for the Mastodon binary sensors."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import setup_integration
|
||||
|
||||
from tests.common import MockConfigEntry, snapshot_platform
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_mastodon_client", "entity_registry_enabled_by_default")
|
||||
async def test_binary_sensors(
|
||||
hass: HomeAssistant,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the binary sensor entities."""
|
||||
with patch("homeassistant.components.mastodon.PLATFORMS", [Platform.BINARY_SENSOR]):
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
|
||||
@@ -349,7 +349,7 @@
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[eve_shutter][binary_sensor.eve_shutter_switch_20eci1701_problem-entry]
|
||||
# name: test_binary_sensors[eve_shutter][binary_sensor.eve_shutter_switch_20eci1701_configuration_status-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
@@ -362,7 +362,7 @@
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.eve_shutter_switch_20eci1701_problem',
|
||||
'entity_id': 'binary_sensor.eve_shutter_switch_20eci1701_configuration_status',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
@@ -370,29 +370,29 @@
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Problem',
|
||||
'object_id_base': 'Configuration status',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Problem',
|
||||
'original_name': 'Configuration status',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'translation_key': 'config_status_operational',
|
||||
'unique_id': '00000000000004D2-0000000000000094-MatterNodeDevice-1-WindowCoveringConfigStatusOperational-258-7',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[eve_shutter][binary_sensor.eve_shutter_switch_20eci1701_problem-state]
|
||||
# name: test_binary_sensors[eve_shutter][binary_sensor.eve_shutter_switch_20eci1701_configuration_status-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'problem',
|
||||
'friendly_name': 'Eve Shutter Switch 20ECI1701 Problem',
|
||||
'friendly_name': 'Eve Shutter Switch 20ECI1701 Configuration status',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.eve_shutter_switch_20eci1701_problem',
|
||||
'entity_id': 'binary_sensor.eve_shutter_switch_20eci1701_configuration_status',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
@@ -1942,7 +1942,7 @@
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[window_covering_full][binary_sensor.mock_full_window_covering_problem-entry]
|
||||
# name: test_binary_sensors[window_covering_full][binary_sensor.mock_full_window_covering_configuration_status-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
@@ -1955,7 +1955,7 @@
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mock_full_window_covering_problem',
|
||||
'entity_id': 'binary_sensor.mock_full_window_covering_configuration_status',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
@@ -1963,36 +1963,36 @@
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Problem',
|
||||
'object_id_base': 'Configuration status',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Problem',
|
||||
'original_name': 'Configuration status',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'translation_key': 'config_status_operational',
|
||||
'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-WindowCoveringConfigStatusOperational-258-7',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[window_covering_full][binary_sensor.mock_full_window_covering_problem-state]
|
||||
# name: test_binary_sensors[window_covering_full][binary_sensor.mock_full_window_covering_configuration_status-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'problem',
|
||||
'friendly_name': 'Mock Full Window Covering Problem',
|
||||
'friendly_name': 'Mock Full Window Covering Configuration status',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mock_full_window_covering_problem',
|
||||
'entity_id': 'binary_sensor.mock_full_window_covering_configuration_status',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[window_covering_lift][binary_sensor.mock_lift_window_covering_problem-entry]
|
||||
# name: test_binary_sensors[window_covering_lift][binary_sensor.mock_lift_window_covering_configuration_status-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
@@ -2005,7 +2005,7 @@
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mock_lift_window_covering_problem',
|
||||
'entity_id': 'binary_sensor.mock_lift_window_covering_configuration_status',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
@@ -2013,36 +2013,36 @@
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Problem',
|
||||
'object_id_base': 'Configuration status',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Problem',
|
||||
'original_name': 'Configuration status',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'translation_key': 'config_status_operational',
|
||||
'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-WindowCoveringConfigStatusOperational-258-7',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[window_covering_lift][binary_sensor.mock_lift_window_covering_problem-state]
|
||||
# name: test_binary_sensors[window_covering_lift][binary_sensor.mock_lift_window_covering_configuration_status-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'problem',
|
||||
'friendly_name': 'Mock Lift Window Covering Problem',
|
||||
'friendly_name': 'Mock Lift Window Covering Configuration status',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.mock_lift_window_covering_problem',
|
||||
'entity_id': 'binary_sensor.mock_lift_window_covering_configuration_status',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[window_covering_pa_lift][binary_sensor.longan_link_wncv_da01_problem-entry]
|
||||
# name: test_binary_sensors[window_covering_pa_lift][binary_sensor.longan_link_wncv_da01_configuration_status-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
@@ -2055,7 +2055,7 @@
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.longan_link_wncv_da01_problem',
|
||||
'entity_id': 'binary_sensor.longan_link_wncv_da01_configuration_status',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
@@ -2063,36 +2063,36 @@
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Problem',
|
||||
'object_id_base': 'Configuration status',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Problem',
|
||||
'original_name': 'Configuration status',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'translation_key': 'config_status_operational',
|
||||
'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-WindowCoveringConfigStatusOperational-258-7',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[window_covering_pa_lift][binary_sensor.longan_link_wncv_da01_problem-state]
|
||||
# name: test_binary_sensors[window_covering_pa_lift][binary_sensor.longan_link_wncv_da01_configuration_status-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'problem',
|
||||
'friendly_name': 'Longan link WNCV DA01 Problem',
|
||||
'friendly_name': 'Longan link WNCV DA01 Configuration status',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.longan_link_wncv_da01_problem',
|
||||
'entity_id': 'binary_sensor.longan_link_wncv_da01_configuration_status',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[window_covering_pa_tilt][binary_sensor.mock_pa_tilt_window_covering_problem-entry]
|
||||
# name: test_binary_sensors[window_covering_pa_tilt][binary_sensor.mock_pa_tilt_window_covering_configuration_status-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
@@ -2105,7 +2105,7 @@
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.mock_pa_tilt_window_covering_problem',
|
||||
'entity_id': 'binary_sensor.mock_pa_tilt_window_covering_configuration_status',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
@@ -2113,36 +2113,36 @@
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'object_id_base': 'Problem',
|
||||
'object_id_base': 'Configuration status',
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Problem',
|
||||
'original_name': 'Configuration status',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'translation_key': 'config_status_operational',
|
||||
'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-WindowCoveringConfigStatusOperational-258-7',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[window_covering_pa_tilt][binary_sensor.mock_pa_tilt_window_covering_problem-state]
|
||||
# name: test_binary_sensors[window_covering_pa_tilt][binary_sensor.mock_pa_tilt_window_covering_configuration_status-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'problem',
'friendly_name': 'Mock PA Tilt Window Covering Problem',
'friendly_name': 'Mock PA Tilt Window Covering Configuration status',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.mock_pa_tilt_window_covering_problem',
'entity_id': 'binary_sensor.mock_pa_tilt_window_covering_configuration_status',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_binary_sensors[window_covering_tilt][binary_sensor.mock_tilt_window_covering_problem-entry]
# name: test_binary_sensors[window_covering_tilt][binary_sensor.mock_tilt_window_covering_configuration_status-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -2155,7 +2155,7 @@
'disabled_by': None,
'domain': 'binary_sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'binary_sensor.mock_tilt_window_covering_problem',
'entity_id': 'binary_sensor.mock_tilt_window_covering_configuration_status',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -2163,36 +2163,36 @@
'labels': set({
}),
'name': None,
'object_id_base': 'Problem',
'object_id_base': 'Configuration status',
'options': dict({
}),
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
'original_icon': None,
'original_name': 'Problem',
'original_name': 'Configuration status',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'config_status_operational',
'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-WindowCoveringConfigStatusOperational-258-7',
'unit_of_measurement': None,
})
# ---
# name: test_binary_sensors[window_covering_tilt][binary_sensor.mock_tilt_window_covering_problem-state]
# name: test_binary_sensors[window_covering_tilt][binary_sensor.mock_tilt_window_covering_configuration_status-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'problem',
'friendly_name': 'Mock Tilt Window Covering Problem',
'friendly_name': 'Mock Tilt Window Covering Configuration status',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.mock_tilt_window_covering_problem',
'entity_id': 'binary_sensor.mock_tilt_window_covering_configuration_status',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_binary_sensors[zemismart_mt25b][binary_sensor.zemismart_mt25b_roller_motor_problem-entry]
# name: test_binary_sensors[zemismart_mt25b][binary_sensor.zemismart_mt25b_roller_motor_configuration_status-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -2205,7 +2205,7 @@
'disabled_by': None,
'domain': 'binary_sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'binary_sensor.zemismart_mt25b_roller_motor_problem',
'entity_id': 'binary_sensor.zemismart_mt25b_roller_motor_configuration_status',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -2213,29 +2213,29 @@
'labels': set({
}),
'name': None,
'object_id_base': 'Problem',
'object_id_base': 'Configuration status',
'options': dict({
}),
'original_device_class': <BinarySensorDeviceClass.PROBLEM: 'problem'>,
'original_icon': None,
'original_name': 'Problem',
'original_name': 'Configuration status',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'config_status_operational',
'unique_id': '00000000000004D2-000000000000007A-MatterNodeDevice-1-WindowCoveringConfigStatusOperational-258-7',
'unit_of_measurement': None,
})
# ---
# name: test_binary_sensors[zemismart_mt25b][binary_sensor.zemismart_mt25b_roller_motor_problem-state]
# name: test_binary_sensors[zemismart_mt25b][binary_sensor.zemismart_mt25b_roller_motor_configuration_status-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'problem',
'friendly_name': 'Zemismart MT25B Roller Motor Problem',
'friendly_name': 'Zemismart MT25B Roller Motor Configuration status',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.zemismart_mt25b_roller_motor_problem',
'entity_id': 'binary_sensor.zemismart_mt25b_roller_motor_configuration_status',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,

@@ -424,7 +424,9 @@ async def test_shutter_problem(
) -> None:
"""Test shutter problem."""
# Eve Shutter default state (ConfigStatus = 9)
state = hass.states.get("binary_sensor.eve_shutter_switch_20eci1701_problem")
state = hass.states.get(
"binary_sensor.eve_shutter_switch_20eci1701_configuration_status"
)
assert state
assert state.state == "off"

@@ -432,7 +434,9 @@ async def test_shutter_problem(
set_node_attribute(matter_node, 1, 258, 7, 8)
await trigger_subscription_callback(hass, matter_client)

state = hass.states.get("binary_sensor.eve_shutter_switch_20eci1701_problem")
state = hass.states.get(
"binary_sensor.eve_shutter_switch_20eci1701_configuration_status"
)
assert state
assert state.state == "on"

@@ -1647,80 +1647,135 @@
|
||||
'image': 'SuPW',
|
||||
'ingredients': list([
|
||||
dict({
|
||||
'display': '1 130g dark couverture chocolate (min. 55% cocoa content)',
|
||||
'food': None,
|
||||
'is_food': None,
|
||||
'note': '130g dark couverture chocolate (min. 55% cocoa content)',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'a3adfe78-d157-44d8-98be-9c133e45bb4e',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 1 Vanilla Pod',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '1 Vanilla Pod',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': '41d234d7-c040-48f9-91e6-f4636aebb77b',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 150g softened butter',
|
||||
'food': None,
|
||||
'is_food': None,
|
||||
'note': '150g softened butter',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 100g Icing sugar',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '100g Icing sugar',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'f7fcd86e-b04b-4e07-b69c-513925811491',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 6 Eggs',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '6 Eggs',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'a831fbc3-e2f5-452e-a745-450be8b4a130',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 100g Castor sugar',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '100g Castor sugar',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'b5ee4bdc-0047-4de7-968b-f3360bbcb31e',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 140g Plain wheat flour',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '140g Plain wheat flour',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'a67db09d-429c-4e77-919d-cfed3da675ad',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 200g apricot jam',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '200g apricot jam',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': '55479752-c062-4b25-aae3-2b210999d7b9',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 200g castor sugar',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '200g castor sugar',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'ff9cd404-24ec-4d38-b0aa-0120ce1df679',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 150g dark couverture chocolate (min. 55% cocoa content)',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '150g dark couverture chocolate (min. 55% cocoa content)',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'c7fca92e-971e-4728-a227-8b04783583ed',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 Unsweetend whipped cream to garnish',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': 'Unsweetend whipped cream to garnish',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'ef023f23-7816-4871-87f6-4d29f9a283f7',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
]),
|
||||
@@ -2223,80 +2278,135 @@
|
||||
'image': 'SuPW',
|
||||
'ingredients': list([
|
||||
dict({
|
||||
'display': '1 130g dark couverture chocolate (min. 55% cocoa content)',
|
||||
'food': None,
|
||||
'is_food': None,
|
||||
'note': '130g dark couverture chocolate (min. 55% cocoa content)',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'a3adfe78-d157-44d8-98be-9c133e45bb4e',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 1 Vanilla Pod',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '1 Vanilla Pod',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': '41d234d7-c040-48f9-91e6-f4636aebb77b',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 150g softened butter',
|
||||
'food': None,
|
||||
'is_food': None,
|
||||
'note': '150g softened butter',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 100g Icing sugar',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '100g Icing sugar',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'f7fcd86e-b04b-4e07-b69c-513925811491',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 6 Eggs',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '6 Eggs',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'a831fbc3-e2f5-452e-a745-450be8b4a130',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 100g Castor sugar',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '100g Castor sugar',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'b5ee4bdc-0047-4de7-968b-f3360bbcb31e',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 140g Plain wheat flour',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '140g Plain wheat flour',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'a67db09d-429c-4e77-919d-cfed3da675ad',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 200g apricot jam',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '200g apricot jam',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': '55479752-c062-4b25-aae3-2b210999d7b9',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 200g castor sugar',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '200g castor sugar',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'ff9cd404-24ec-4d38-b0aa-0120ce1df679',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 150g dark couverture chocolate (min. 55% cocoa content)',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': '150g dark couverture chocolate (min. 55% cocoa content)',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'c7fca92e-971e-4728-a227-8b04783583ed',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
dict({
|
||||
'display': '1 Unsweetend whipped cream to garnish',
|
||||
'food': None,
|
||||
'is_food': True,
|
||||
'note': 'Unsweetend whipped cream to garnish',
|
||||
'original_text': None,
|
||||
'quantity': 1.0,
|
||||
'reference_id': 'ef023f23-7816-4871-87f6-4d29f9a283f7',
|
||||
'referenced_recipe': None,
|
||||
'title': None,
|
||||
'unit': None,
|
||||
}),
|
||||
]),
|
||||
|
||||
@@ -2,8 +2,6 @@

from unittest.mock import MagicMock, patch

import pytest

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

@@ -12,7 +10,6 @@ from . import MAC, setup_mock_device
from tests.common import MockConfigEntry


@pytest.mark.asyncio
async def test_migrate_camera_entities_unique_ids(hass: HomeAssistant) -> None:
"""Test that camera entities unique ids get migrated properly."""
config_entry = MockConfigEntry(domain="onvif", unique_id=MAC)

@@ -21,7 +21,6 @@ START_DATE = date(2025, 10, 4)
END_DATE = date(2025, 10, 5)


@pytest.mark.asyncio
@pytest.mark.parametrize(
(
"pickup_name",

@@ -35,6 +35,14 @@ MOCK_USER_REAUTH = {
CONF_PASSWORD: "new_password",
}

MOCK_USER_RECONFIGURE = {
CONF_HOST: "1.1.1.2",
CONF_SSL: True,
CONF_VERIFY_SSL: False,
CONF_GROUP: "user",
}


MOCK_DHCP_DISCOVERY_INPUT = {
CONF_SSL: True,
CONF_VERIFY_SSL: False,

@@ -3,11 +3,12 @@

from unittest.mock import AsyncMock, MagicMock, patch

from pysma import SmaAuthenticationException, SmaConnectionException, SmaReadException
from pysma.helpers import DeviceInfo
import pytest

from homeassistant.components.sma.const import DOMAIN
from homeassistant.components.sma.const import CONF_GROUP, DOMAIN
from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER
from homeassistant.const import CONF_MAC
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_SSL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers.device_registry import format_mac
@@ -19,6 +20,7 @@ from . import (
MOCK_DHCP_DISCOVERY_INPUT,
MOCK_USER_INPUT,
MOCK_USER_REAUTH,
MOCK_USER_RECONFIGURE,
)

from tests.conftest import MockConfigEntry
@@ -311,3 +313,109 @@ async def test_reauth_flow_exceptions(

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reauth_successful"


async def test_full_flow_reconfigure(
hass: HomeAssistant,
mock_setup_entry: MockConfigEntry,
mock_sma_client: AsyncMock,
) -> None:
"""Test the full flow of the config flow."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT, unique_id="123456789")
entry.add_to_hass(hass)
result = await entry.start_reconfigure_flow(hass)

assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reconfigure"

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input=MOCK_USER_RECONFIGURE,
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reconfigure_successful"
assert entry.data[CONF_HOST] == "1.1.1.2"
assert entry.data[CONF_SSL] is True
assert entry.data[CONF_VERIFY_SSL] is False
assert entry.data[CONF_GROUP] == "user"
assert len(mock_setup_entry.mock_calls) == 1


@pytest.mark.parametrize(
("exception", "error"),
[
(SmaConnectionException, "cannot_connect"),
(SmaAuthenticationException, "invalid_auth"),
(SmaReadException, "cannot_retrieve_device_info"),
(Exception, "unknown"),
],
)
async def test_full_flow_reconfigure_exceptions(
hass: HomeAssistant,
mock_setup_entry: MockConfigEntry,
mock_sma_client: AsyncMock,
exception: Exception,
error: str,
) -> None:
"""Test we handle cannot connect error and recover from it."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT, unique_id="123456789")
entry.add_to_hass(hass)
result = await entry.start_reconfigure_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reconfigure"

mock_sma_client.new_session.side_effect = exception
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
MOCK_USER_RECONFIGURE,
)

assert result["type"] is FlowResultType.FORM
assert result["errors"] == {"base": error}

mock_sma_client.new_session.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input=MOCK_USER_RECONFIGURE,
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reconfigure_successful"
assert entry.data[CONF_HOST] == "1.1.1.2"
assert entry.data[CONF_SSL] is True
assert entry.data[CONF_VERIFY_SSL] is False
assert entry.data[CONF_GROUP] == "user"
assert len(mock_setup_entry.mock_calls) == 1


async def test_reconfigure_mismatch_id(
hass: HomeAssistant,
mock_setup_entry: MockConfigEntry,
mock_sma_client: AsyncMock,
) -> None:
"""Test when a mismatch happens during reconfigure."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT, unique_id="123456789")
entry.add_to_hass(hass)
result = await entry.start_reconfigure_flow(hass)

assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reconfigure"

# New device, on purpose to demonstrate we can't switch
different_device = DeviceInfo(
manufacturer="SMA",
name="Different SMA Device",
type="Sunny Boy 5.0",
serial=987654321,
sw_version="2.0.0",
)
mock_sma_client.device_info = AsyncMock(return_value=different_device)

result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input=MOCK_USER_RECONFIGURE,
)

assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "unique_id_mismatch"

@@ -1,7 +1,5 @@
"""Tests for the SmartThings integration."""

import sys
import types
from typing import Any
from unittest.mock import AsyncMock

@@ -92,38 +90,3 @@ async def trigger_health_update(
if call[0][0] == device_id:
call[0][1](event)
await hass.async_block_till_done()


def ensure_haffmpeg_stubs() -> None:
"""Ensure haffmpeg stubs are available for SmartThings tests."""
if "haffmpeg" in sys.modules:
return

haffmpeg_module = types.ModuleType("haffmpeg")
haffmpeg_core_module = types.ModuleType("haffmpeg.core")
haffmpeg_tools_module = types.ModuleType("haffmpeg.tools")

class _StubHAFFmpeg: ...

class _StubFFVersion:
def __init__(self, bin_path: str | None = None) -> None:
self.bin_path = bin_path

async def get_version(self) -> str:
return "4.0.0"

class _StubImageFrame: ...

haffmpeg_core_module.HAFFmpeg = _StubHAFFmpeg
haffmpeg_tools_module.IMAGE_JPEG = b""
haffmpeg_tools_module.FFVersion = _StubFFVersion
haffmpeg_tools_module.ImageFrame = _StubImageFrame
haffmpeg_module.core = haffmpeg_core_module
haffmpeg_module.tools = haffmpeg_tools_module

sys.modules["haffmpeg"] = haffmpeg_module
sys.modules["haffmpeg.core"] = haffmpeg_core_module
sys.modules["haffmpeg.tools"] = haffmpeg_tools_module


ensure_haffmpeg_stubs()

@@ -37,7 +37,7 @@
|
||||
'platform': 'smartthings',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <MediaPlayerEntityFeature: 24461>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 23949>,
|
||||
'translation_key': None,
|
||||
'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main',
|
||||
'unit_of_measurement': None,
|
||||
@@ -59,7 +59,7 @@
|
||||
'HDMI2',
|
||||
'digital',
|
||||
]),
|
||||
'supported_features': <MediaPlayerEntityFeature: 24461>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 23949>,
|
||||
'volume_level': 0.01,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
@@ -101,7 +101,7 @@
|
||||
'platform': 'smartthings',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <MediaPlayerEntityFeature: 318989>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 318477>,
|
||||
'translation_key': None,
|
||||
'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main',
|
||||
'unit_of_measurement': None,
|
||||
@@ -115,7 +115,7 @@
|
||||
'is_volume_muted': False,
|
||||
'repeat': <RepeatMode.OFF: 'off'>,
|
||||
'shuffle': False,
|
||||
'supported_features': <MediaPlayerEntityFeature: 318989>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 318477>,
|
||||
'volume_level': 0.52,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
@@ -157,7 +157,7 @@
|
||||
'platform': 'smartthings',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <MediaPlayerEntityFeature: 22029>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 21517>,
|
||||
'translation_key': None,
|
||||
'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536_main',
|
||||
'unit_of_measurement': None,
|
||||
@@ -171,7 +171,7 @@
|
||||
'is_volume_muted': False,
|
||||
'media_artist': 'David Guetta',
|
||||
'media_title': 'Forever Young',
|
||||
'supported_features': <MediaPlayerEntityFeature: 22029>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 21517>,
|
||||
'volume_level': 0.15,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
@@ -213,7 +213,7 @@
|
||||
'platform': 'smartthings',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <MediaPlayerEntityFeature: 22413>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 21901>,
|
||||
'translation_key': None,
|
||||
'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main',
|
||||
'unit_of_measurement': None,
|
||||
@@ -228,7 +228,7 @@
|
||||
'media_artist': '',
|
||||
'media_title': '',
|
||||
'source': 'HDMI1',
|
||||
'supported_features': <MediaPlayerEntityFeature: 22413>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 21901>,
|
||||
'volume_level': 0.17,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
@@ -270,7 +270,7 @@
|
||||
'platform': 'smartthings',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <MediaPlayerEntityFeature: 1932>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 1420>,
|
||||
'translation_key': None,
|
||||
'unique_id': 'a75cb1e1-03fd-3c77-ca9f-d4e56c4096c6_main',
|
||||
'unit_of_measurement': None,
|
||||
@@ -281,7 +281,7 @@
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'speaker',
|
||||
'friendly_name': 'Soundbar',
|
||||
'supported_features': <MediaPlayerEntityFeature: 1932>,
|
||||
'supported_features': <MediaPlayerEntityFeature: 1420>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'media_player.soundbar',
|
||||
|
||||
@@ -1,531 +0,0 @@
|
||||
"""Tests for SmartThings audio helper."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import AsyncMock, patch
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.smartthings.audio import (
|
||||
FFMPEG_MAX_DURATION_SECONDS,
|
||||
MAX_STORED_ENTRIES,
|
||||
PCM_CHANNELS,
|
||||
PCM_MIME,
|
||||
PCM_SAMPLE_RATE,
|
||||
PCM_SAMPLE_WIDTH,
|
||||
TRANSCODE_TIMEOUT_SECONDS,
|
||||
WARNING_DURATION_SECONDS,
|
||||
SmartThingsAudioError,
|
||||
async_get_audio_manager,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.network import NoURLAvailableError
|
||||
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
|
||||
class _FakeProcess:
|
||||
"""Async subprocess stand-in that provides communicate."""
|
||||
|
||||
def __init__(self, stdout: bytes, stderr: bytes, returncode: int) -> None:
|
||||
self._stdout = stdout
|
||||
self._stderr = stderr
|
||||
self.returncode = returncode
|
||||
self.killed = False
|
||||
|
||||
async def communicate(self) -> tuple[bytes, bytes]:
|
||||
return self._stdout, self._stderr
|
||||
|
||||
def kill(self) -> None:
|
||||
self.killed = True
|
||||
|
||||
|
||||
def _build_pcm(
|
||||
duration_seconds: float = 1.0,
|
||||
*,
|
||||
sample_rate: int = PCM_SAMPLE_RATE,
|
||||
sample_width: int = PCM_SAMPLE_WIDTH,
|
||||
channels: int = PCM_CHANNELS,
|
||||
) -> bytes:
|
||||
"""Generate silent raw PCM bytes for testing."""
|
||||
frame_count = int(sample_rate * duration_seconds)
|
||||
return b"\x00" * frame_count * sample_width * channels
|
||||
|
||||
|
||||
async def test_prepare_notification_creates_url(
|
||||
hass: HomeAssistant,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
) -> None:
|
||||
"""Ensure PCM proxy URLs are generated and served."""
|
||||
|
||||
hass.config.external_url = "https://example.com"
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
pcm_bytes = _build_pcm()
|
||||
|
||||
with patch.object(
|
||||
manager, "_transcode_to_pcm", AsyncMock(return_value=(pcm_bytes, 1.0, False))
|
||||
):
|
||||
url = await manager.async_prepare_notification("https://example.com/source.mp3")
|
||||
|
||||
parsed = urlsplit(url)
|
||||
assert parsed.path.endswith(".pcm")
|
||||
assert not parsed.query
|
||||
|
||||
client = await hass_client_no_auth()
|
||||
response = await client.get(parsed.path)
|
||||
assert response.status == 200
|
||||
assert response.headers["Content-Type"] == PCM_MIME
|
||||
assert response.headers["Cache-Control"] == "no-store"
|
||||
body = await response.read()
|
||||
assert body == pcm_bytes
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_prepare_notification_uses_internal_url_when_external_missing(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Fallback to the internal URL if no external URL is available."""
|
||||
|
||||
hass.config.external_url = None
|
||||
hass.config.internal_url = "http://homeassistant.local:8123"
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
pcm_bytes = _build_pcm()
|
||||
|
||||
with patch.object(
|
||||
manager, "_transcode_to_pcm", AsyncMock(return_value=(pcm_bytes, 1.0, False))
|
||||
):
|
||||
url = await manager.async_prepare_notification("https://example.com/source.mp3")
|
||||
|
||||
parsed = urlsplit(url)
|
||||
assert parsed.scheme == "http"
|
||||
assert parsed.netloc == "homeassistant.local:8123"
|
||||
assert parsed.path.endswith(".pcm")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_prepare_notification_requires_accessible_url(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Fail if neither external nor internal URLs are available."""
|
||||
|
||||
hass.config.external_url = None
|
||||
hass.config.internal_url = None
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
pcm_bytes = _build_pcm()
|
||||
|
||||
with (
|
||||
patch.object(
|
||||
manager,
|
||||
"_transcode_to_pcm",
|
||||
AsyncMock(return_value=(pcm_bytes, 1.0, False)),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.get_url",
|
||||
side_effect=NoURLAvailableError,
|
||||
) as mock_get_url,
|
||||
pytest.raises(SmartThingsAudioError),
|
||||
):
|
||||
await manager.async_prepare_notification("https://example.com/source.mp3")
|
||||
|
||||
assert mock_get_url.called
|
||||
# Stored entry should be cleaned up after failure so subsequent requests
|
||||
# don't leak memory or serve stale audio.
|
||||
assert not manager._entries
|
||||
|
||||
|
||||
async def test_audio_view_returns_404_for_unknown_token(
|
||||
hass: HomeAssistant,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
) -> None:
|
||||
"""Unknown tokens should return 404."""
|
||||
|
||||
await async_get_audio_manager(hass)
|
||||
client = await hass_client_no_auth()
|
||||
response = await client.get("/api/smartthings/audio/invalid-token.pcm")
|
||||
assert response.status == 404
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_prepare_notification_raises_when_transcode_empty(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Transcoding empty audio results in an error."""
|
||||
|
||||
hass.config.external_url = "https://example.com"
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
with (
|
||||
patch.object(
|
||||
manager, "_transcode_to_pcm", AsyncMock(return_value=(b"", 0.0, False))
|
||||
),
|
||||
pytest.raises(SmartThingsAudioError, match="Converted audio is empty"),
|
||||
):
|
||||
await manager.async_prepare_notification("https://example.com/source.mp3")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_prepare_notification_warns_when_duration_exceeds_max(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Warn when transcoded audio exceeds the SmartThings duration limit."""
|
||||
|
||||
hass.config.external_url = "https://example.com"
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
pcm_bytes = b"pcm"
|
||||
caplog.set_level(logging.WARNING)
|
||||
|
||||
with patch.object(
|
||||
manager,
|
||||
"_transcode_to_pcm",
|
||||
AsyncMock(return_value=(pcm_bytes, FFMPEG_MAX_DURATION_SECONDS + 1.0, True)),
|
||||
):
|
||||
await manager.async_prepare_notification("https://example.com/source.mp3")
|
||||
|
||||
assert any("truncated" in record.message for record in caplog.records)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_prepare_notification_warns_when_duration_exceeds_warning(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Warn when transcoded audio exceeds the SmartThings warning threshold."""
|
||||
|
||||
hass.config.external_url = "https://example.com"
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
pcm_bytes = _build_pcm(duration_seconds=WARNING_DURATION_SECONDS + 1)
|
||||
caplog.set_level(logging.WARNING)
|
||||
|
||||
with patch.object(
|
||||
manager,
|
||||
"_transcode_to_pcm",
|
||||
AsyncMock(return_value=(pcm_bytes, WARNING_DURATION_SECONDS + 1.0, False)),
|
||||
):
|
||||
await manager.async_prepare_notification("https://example.com/source.mp3")
|
||||
|
||||
assert any(
|
||||
"playback over" in record.message and "truncated" not in record.message
|
||||
for record in caplog.records
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_prepare_notification_regenerates_token_on_collision(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Regenerate tokens when a collision is detected."""
|
||||
|
||||
hass.config.external_url = "https://example.com"
|
||||
manager = await async_get_audio_manager(hass)
|
||||
pcm_bytes = _build_pcm()
|
||||
|
||||
with (
|
||||
patch.object(
|
||||
manager,
|
||||
"_transcode_to_pcm",
|
||||
AsyncMock(return_value=(pcm_bytes, 1.0, False)),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.secrets.token_urlsafe",
|
||||
side_effect=["dup", "dup", "unique"],
|
||||
),
|
||||
):
|
||||
url1 = await manager.async_prepare_notification(
|
||||
"https://example.com/source.mp3"
|
||||
)
|
||||
url2 = await manager.async_prepare_notification(
|
||||
"https://example.com/source.mp3"
|
||||
)
|
||||
|
||||
assert urlsplit(url1).path.endswith("/dup.pcm")
|
||||
assert urlsplit(url2).path.endswith("/unique.pcm")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_prepare_notification_schedules_cleanup(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Ensure cached entries are scheduled for cleanup."""
|
||||
|
||||
hass.config.external_url = "https://example.com"
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
pcm_bytes = _build_pcm()
|
||||
|
||||
with patch.object(
|
||||
manager,
|
||||
"_transcode_to_pcm",
|
||||
AsyncMock(return_value=(pcm_bytes, 1.0, False)),
|
||||
):
|
||||
await manager.async_prepare_notification("https://example.com/source.mp3")
|
||||
|
||||
assert manager._cleanup_handle is not None
|
||||
for entry in manager._entries.values():
|
||||
entry.expires = 0
|
||||
|
||||
manager._cleanup_callback()
|
||||
|
||||
assert not manager._entries
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_prepare_notification_caps_entry_count(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Ensure cached entries are capped."""
|
||||
|
||||
hass.config.external_url = "https://example.com"
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
pcm_bytes = _build_pcm()
|
||||
caplog.set_level(logging.DEBUG)
|
||||
|
||||
with patch.object(
|
||||
manager,
|
||||
"_transcode_to_pcm",
|
||||
AsyncMock(return_value=(pcm_bytes, 1.0, False)),
|
||||
):
|
||||
for _ in range(MAX_STORED_ENTRIES + 2):
|
||||
await manager.async_prepare_notification("https://example.com/source.mp3")
|
||||
|
||||
assert len(manager._entries) == MAX_STORED_ENTRIES
|
||||
assert any(
|
||||
"Dropped oldest SmartThings audio token" in record.message
|
||||
for record in caplog.records
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcode_to_pcm_handles_missing_ffmpeg(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Raise friendly error when ffmpeg is unavailable."""
|
||||
|
||||
manager = await async_get_audio_manager(hass)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.ffmpeg.get_ffmpeg_manager",
|
||||
return_value=SimpleNamespace(binary="ffmpeg"),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.create_subprocess_exec",
|
||||
side_effect=FileNotFoundError,
|
||||
),
|
||||
pytest.raises(SmartThingsAudioError, match="FFmpeg is required"),
|
||||
):
|
||||
await manager._transcode_to_pcm("https://example.com/source.mp3")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcode_to_pcm_handles_process_failure(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Raise when ffmpeg reports an error."""
|
||||
|
||||
manager = await async_get_audio_manager(hass)
|
||||
caplog.set_level(logging.ERROR)
|
||||
|
||||
fake_process = _FakeProcess(stdout=b"", stderr=b"boom", returncode=1)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.ffmpeg.get_ffmpeg_manager",
|
||||
return_value=SimpleNamespace(binary="ffmpeg"),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.create_subprocess_exec",
|
||||
AsyncMock(return_value=fake_process),
|
||||
),
|
||||
pytest.raises(SmartThingsAudioError, match="Unable to convert"),
|
||||
):
|
||||
await manager._transcode_to_pcm("https://example.com/source.mp3")
|
||||
|
||||
assert any("FFmpeg failed" in record.message for record in caplog.records)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcode_to_pcm_times_out_and_kills_process(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Kill ffmpeg when the transcode times out."""
|
||||
|
||||
manager = await async_get_audio_manager(hass)
|
||||
fake_process = _FakeProcess(stdout=b"\x00\x00", stderr=b"", returncode=0)
|
||||
caplog.set_level(logging.WARNING)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.ffmpeg.get_ffmpeg_manager",
|
||||
return_value=SimpleNamespace(binary="ffmpeg"),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.create_subprocess_exec",
|
||||
AsyncMock(return_value=fake_process),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.wait_for",
|
||||
side_effect=TimeoutError,
|
||||
),
|
||||
):
|
||||
pcm, duration, truncated = await manager._transcode_to_pcm(
|
||||
"https://example.com/source.mp3"
|
||||
)
|
||||
|
||||
assert fake_process.killed is True
|
||||
assert pcm == b"\x00\x00"
|
||||
assert duration == pytest.approx(1 / PCM_SAMPLE_RATE)
|
||||
assert truncated is False
|
||||
assert any("FFmpeg timed out" in record.message for record in caplog.records)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcode_to_pcm_returns_empty_audio(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Return empty payload when ffmpeg produced nothing."""
|
||||
|
||||
manager = await async_get_audio_manager(hass)
|
||||
fake_process = _FakeProcess(stdout=b"", stderr=b"", returncode=0)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.ffmpeg.get_ffmpeg_manager",
|
||||
return_value=SimpleNamespace(binary="ffmpeg"),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.create_subprocess_exec",
|
||||
AsyncMock(return_value=fake_process),
|
||||
) as mock_exec,
|
||||
):
|
||||
pcm, duration, truncated = await manager._transcode_to_pcm(
|
||||
"https://example.com/source.mp3"
|
||||
)
|
||||
|
||||
assert pcm == b""
|
||||
assert duration == 0.0
|
||||
assert truncated is False
|
||||
mock_exec.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcode_to_pcm_enforces_duration_cap(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
"""Ensure ffmpeg is instructed to limit duration and timeout is enforced."""
|
||||
|
||||
manager = await async_get_audio_manager(hass)
|
||||
pcm_bytes = _build_pcm(duration_seconds=FFMPEG_MAX_DURATION_SECONDS)
|
||||
fake_process = _FakeProcess(stdout=pcm_bytes, stderr=b"", returncode=0)
|
||||
|
||||
timeouts: list[float] = []
|
||||
original_wait_for = asyncio.wait_for
|
||||
|
||||
async def _wait_for(awaitable, timeout):
|
||||
timeouts.append(timeout)
|
||||
return await original_wait_for(awaitable, timeout)
|
||||
|
||||
mock_exec = AsyncMock(return_value=fake_process)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.ffmpeg.get_ffmpeg_manager",
|
||||
return_value=SimpleNamespace(binary="ffmpeg"),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.create_subprocess_exec",
|
||||
mock_exec,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.wait_for",
|
||||
new=_wait_for,
|
||||
),
|
||||
):
|
||||
pcm, duration, truncated = await manager._transcode_to_pcm(
|
||||
"https://example.com/source.mp3"
|
||||
)
|
||||
|
||||
command = list(mock_exec.await_args.args)
|
||||
assert "-t" in command
|
||||
assert command[command.index("-t") + 1] == str(FFMPEG_MAX_DURATION_SECONDS)
|
||||
assert timeouts == [TRANSCODE_TIMEOUT_SECONDS]
|
||||
assert pcm == pcm_bytes
|
||||
assert duration == pytest.approx(FFMPEG_MAX_DURATION_SECONDS)
|
||||
assert truncated is True
|
||||
|
||||
|
||||
async def test_transcode_to_pcm_logs_misaligned_pcm(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Log debug output when ffmpeg output contains a partial frame."""
|
||||
|
||||
manager = await async_get_audio_manager(hass)
|
||||
caplog.set_level(logging.DEBUG)
|
||||
|
||||
pcm_bytes = _build_pcm() + b"\xaa"
|
||||
fake_process = _FakeProcess(stdout=pcm_bytes, stderr=b"", returncode=0)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.ffmpeg.get_ffmpeg_manager",
|
||||
return_value=SimpleNamespace(binary="ffmpeg"),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.create_subprocess_exec",
|
||||
AsyncMock(return_value=fake_process),
|
||||
),
|
||||
):
|
||||
pcm, duration, truncated = await manager._transcode_to_pcm(
|
||||
"https://example.com/source.mp3"
|
||||
)
|
||||
|
||||
assert pcm == _build_pcm()
|
||||
assert duration > 0
|
||||
assert truncated is False
|
||||
assert any("misaligned PCM" in record.message for record in caplog.records)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transcode_to_pcm_drops_partial_frame_payload(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Drop audio entirely when ffmpeg returns fewer bytes than a full frame."""
|
||||
|
||||
manager = await async_get_audio_manager(hass)
|
||||
caplog.set_level(logging.DEBUG)
|
||||
|
||||
fake_process = _FakeProcess(stdout=b"\x00", stderr=b"", returncode=0)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.ffmpeg.get_ffmpeg_manager",
|
||||
return_value=SimpleNamespace(binary="ffmpeg"),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.audio.asyncio.create_subprocess_exec",
|
||||
AsyncMock(return_value=fake_process),
|
||||
),
|
||||
):
|
||||
pcm, duration, truncated = await manager._transcode_to_pcm(
|
||||
"https://example.com/source.mp3"
|
||||
)
|
||||
|
||||
assert pcm == b""
|
||||
assert duration == 0.0
|
||||
assert truncated is False
|
||||
assert any("misaligned PCM" in record.message for record in caplog.records)
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Test for the SmartThings media player platform."""
|
||||
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import AsyncMock, patch
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from pysmartthings import Attribute, Capability, Command, Status
|
||||
from pysmartthings.models import HealthStatus
|
||||
@@ -10,19 +9,14 @@ from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.media_player import (
|
||||
ATTR_INPUT_SOURCE,
|
||||
ATTR_MEDIA_CONTENT_ID,
|
||||
ATTR_MEDIA_CONTENT_TYPE,
|
||||
ATTR_MEDIA_REPEAT,
|
||||
ATTR_MEDIA_SHUFFLE,
|
||||
ATTR_MEDIA_VOLUME_LEVEL,
|
||||
ATTR_MEDIA_VOLUME_MUTED,
|
||||
DOMAIN as MEDIA_PLAYER_DOMAIN,
|
||||
SERVICE_PLAY_MEDIA,
|
||||
SERVICE_SELECT_SOURCE,
|
||||
MediaType,
|
||||
RepeatMode,
|
||||
)
|
||||
from homeassistant.components.smartthings.audio import SmartThingsAudioError
|
||||
from homeassistant.components.smartthings.const import MAIN
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
@@ -45,7 +39,6 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import (
|
||||
@@ -205,176 +198,6 @@ async def test_volume_down(
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"])
|
||||
async def test_play_media_notification(
|
||||
hass: HomeAssistant,
|
||||
devices: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test playing media via SmartThings audio notification."""
|
||||
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
manager = AsyncMock()
|
||||
manager.async_prepare_notification.return_value = "https://example.com/audio.pcm"
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.media_player.async_get_audio_manager",
|
||||
AsyncMock(return_value=manager),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.media_player.async_process_play_media_url",
|
||||
return_value="https://example.com/source.mp3",
|
||||
),
|
||||
):
|
||||
await hass.services.async_call(
|
||||
MEDIA_PLAYER_DOMAIN,
|
||||
SERVICE_PLAY_MEDIA,
|
||||
{
|
||||
ATTR_ENTITY_ID: "media_player.soundbar",
|
||||
ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC,
|
||||
ATTR_MEDIA_CONTENT_ID: "https://example.com/source.mp3",
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
expected_command = Command("playTrackAndResume")
|
||||
devices.execute_device_command.assert_called_once_with(
|
||||
"afcf3b91-0000-1111-2222-ddff2a0a6577",
|
||||
Capability.AUDIO_NOTIFICATION,
|
||||
expected_command,
|
||||
MAIN,
|
||||
argument=["https://example.com/audio.pcm"],
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"])
|
||||
async def test_play_media_requires_audio_notification_capability(
|
||||
hass: HomeAssistant,
|
||||
devices: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Expect an error if the device lacks audio notification support."""
|
||||
|
||||
devices.get_device_status.return_value[MAIN].pop(
|
||||
Capability.AUDIO_NOTIFICATION, None
|
||||
)
|
||||
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
entity = hass.data["entity_components"][MEDIA_PLAYER_DOMAIN].get_entity(
|
||||
"media_player.soundbar"
|
||||
)
|
||||
assert entity is not None
|
||||
|
||||
with pytest.raises(
|
||||
HomeAssistantError, match="Device does not support audio notifications"
|
||||
):
|
||||
await entity.async_play_media(MediaType.MUSIC, "https://example.com/source.mp3")
|
||||
|
||||
|
||||
@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"])
|
||||
async def test_play_media_rejects_unsupported_media_type(
|
||||
hass: HomeAssistant,
|
||||
devices: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Unsupported media types should raise an error."""
|
||||
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
entity = hass.data["entity_components"][MEDIA_PLAYER_DOMAIN].get_entity(
|
||||
"media_player.soundbar"
|
||||
)
|
||||
assert entity is not None
|
||||
|
||||
with pytest.raises(
|
||||
HomeAssistantError, match="Unsupported media type for SmartThings audio"
|
||||
):
|
||||
await entity.async_play_media(
|
||||
MediaType.TVSHOW, "https://example.com/source.mp3"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"])
|
||||
async def test_play_media_uses_media_source_resolution(
|
||||
hass: HomeAssistant,
|
||||
devices: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Media source IDs are resolved and processed before playback."""
|
||||
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
manager = AsyncMock()
|
||||
manager.async_prepare_notification.return_value = "https://example.com/audio.pcm"
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.media_player.async_get_audio_manager",
|
||||
AsyncMock(return_value=manager),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.media_player.async_process_play_media_url",
|
||||
return_value="https://example.com/processed.mp3",
|
||||
) as mock_process,
|
||||
patch(
|
||||
"homeassistant.components.smartthings.media_player.media_source.is_media_source_id",
|
||||
return_value=True,
|
||||
) as mock_is_media,
|
||||
patch(
|
||||
"homeassistant.components.smartthings.media_player.media_source.async_resolve_media",
|
||||
AsyncMock(
|
||||
return_value=SimpleNamespace(url="https://example.com/from_source")
|
||||
),
|
||||
) as mock_resolve,
|
||||
):
|
||||
entity = hass.data["entity_components"][MEDIA_PLAYER_DOMAIN].get_entity(
|
||||
"media_player.soundbar"
|
||||
)
|
||||
assert entity is not None
|
||||
|
||||
await entity.async_play_media(MediaType.MUSIC, "media-source://foo")
|
||||
|
||||
mock_is_media.assert_called_once()
|
||||
mock_resolve.assert_called_once()
|
||||
mock_process.assert_called_with(hass, "https://example.com/from_source")
|
||||
devices.execute_device_command.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"])
|
||||
async def test_play_media_wraps_audio_errors(
|
||||
hass: HomeAssistant,
|
||||
devices: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""SmartThings audio errors propagate as HomeAssistantError."""
|
||||
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
manager = AsyncMock()
|
||||
manager.async_prepare_notification.side_effect = SmartThingsAudioError("boom")
|
||||
|
||||
entity = hass.data["entity_components"][MEDIA_PLAYER_DOMAIN].get_entity(
|
||||
"media_player.soundbar"
|
||||
)
|
||||
assert entity is not None
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.smartthings.media_player.async_get_audio_manager",
|
||||
AsyncMock(return_value=manager),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.smartthings.media_player.async_process_play_media_url",
|
||||
return_value="https://example.com/source.mp3",
|
||||
),
|
||||
pytest.raises(HomeAssistantError, match="boom"),
|
||||
):
|
||||
await entity.async_play_media(MediaType.MUSIC, "https://example.com/source.mp3")
|
||||
|
||||
|
||||
@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"])
|
||||
async def test_media_play(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -260,7 +260,6 @@ async def test_remove_privacy_zone(
assert not doorbell.privacy_zones


@pytest.mark.asyncio
async def get_user_keyring_info(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,

@@ -1,7 +1,5 @@
"""Tests for analytics platform."""

import pytest

from homeassistant.components.analytics import async_devices_payload
from homeassistant.components.wled import DOMAIN
from homeassistant.core import HomeAssistant
@@ -11,7 +9,6 @@ from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry


@pytest.mark.asyncio
async def test_analytics(
hass: HomeAssistant, device_registry: dr.DeviceRegistry
) -> None:

@@ -4,8 +4,9 @@ from copy import deepcopy
from unittest.mock import MagicMock, patch

import pytest
from zwave_js_server.client import Client
from zwave_js_server.event import Event
from zwave_js_server.model.node import Node
from zwave_js_server.model.node import Node, NodeDataType

from homeassistant.components.zwave_js import DOMAIN
from homeassistant.components.zwave_js.const import CONF_KEEP_OLD_DEVICES
@@ -23,9 +24,12 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator


async def _trigger_repair_issue(
hass: HomeAssistant, client, multisensor_6_state
hass: HomeAssistant,
client: Client,
multisensor_6_state: NodeDataType,
device_config_changed: bool = True,
) -> Node:
"""Trigger repair issue."""
"""Trigger repair issue with configurable device config changed status."""
# Create a node
node_state = deepcopy(multisensor_6_state)
node = Node(client, node_state)
@@ -40,7 +44,7 @@ async def _trigger_repair_issue(
)
with patch(
"zwave_js_server.model.node.Node.async_has_device_config_changed",
return_value=True,
return_value=device_config_changed,
):
client.driver.controller.receive_event(event)
await hass.async_block_till_done()
@@ -55,9 +59,9 @@ async def test_device_config_file_changed_confirm_step(
hass_client: ClientSessionGenerator,
hass_ws_client: WebSocketGenerator,
device_registry: dr.DeviceRegistry,
client,
multisensor_6_state,
integration,
client: Client,
multisensor_6_state: NodeDataType,
integration: MockConfigEntry,
) -> None:
"""Test the device_config_file_changed issue confirm step."""
node = await _trigger_repair_issue(hass, client, multisensor_6_state)
@@ -116,14 +120,54 @@ async def test_device_config_file_changed_confirm_step(
assert len(msg["result"]["issues"]) == 0


async def test_device_config_file_changed_cleared(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
device_registry: dr.DeviceRegistry,
client: Client,
multisensor_6_state: NodeDataType,
integration: MockConfigEntry,
) -> None:
"""Test the device_config_file_changed issue is cleared when no longer true."""
node = await _trigger_repair_issue(hass, client, multisensor_6_state)

device = device_registry.async_get_device(
identifiers={get_device_id(client.driver, node)}
)
assert device
issue_id = f"device_config_file_changed.{device.id}"

await async_process_repairs_platforms(hass)
ws_client = await hass_ws_client(hass)

# Assert the issue is present
await ws_client.send_json({"id": 1, "type": "repairs/list_issues"})
msg = await ws_client.receive_json()
assert msg["success"]
assert len(msg["result"]["issues"]) == 1
issue = msg["result"]["issues"][0]
assert issue["issue_id"] == issue_id

# Simulate the node becoming ready again with device config no longer changed
await _trigger_repair_issue(
hass, client, multisensor_6_state, device_config_changed=False
)

# Assert the issue is now cleared
await ws_client.send_json({"id": 2, "type": "repairs/list_issues"})
msg = await ws_client.receive_json()
assert msg["success"]
assert len(msg["result"]["issues"]) == 0


async def test_device_config_file_changed_ignore_step(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
hass_ws_client: WebSocketGenerator,
device_registry: dr.DeviceRegistry,
client,
multisensor_6_state,
integration,
client: Client,
multisensor_6_state: NodeDataType,
integration: MockConfigEntry,
) -> None:
"""Test the device_config_file_changed issue ignore step."""
node = await _trigger_repair_issue(hass, client, multisensor_6_state)
@@ -237,9 +281,9 @@ async def test_abort_confirm(
hass_client: ClientSessionGenerator,
hass_ws_client: WebSocketGenerator,
device_registry: dr.DeviceRegistry,
client,
multisensor_6_state,
integration,
client: Client,
multisensor_6_state: NodeDataType,
integration: MockConfigEntry,
) -> None:
"""Test aborting device_config_file_changed issue in confirm step."""
node = await _trigger_repair_issue(hass, client, multisensor_6_state)