Compare commits

..

4 Commits

Author SHA1 Message Date
Paulus Schoutsen
258dfdda8f Remove None support from ESPHome supported_frequency_ranges
Always return frequency ranges from device info, matching the
updated RadioFrequencyTransmitterEntity contract.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 20:43:58 -04:00
Paulus Schoutsen
dcc0745fd2 Remove None support from supported_frequency_ranges
Require all RadioFrequencyTransmitterEntity subclasses to provide
explicit frequency ranges.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 20:43:11 -04:00
Paulus Schoutsen
174c86fd36 Add radio_frequency platform to ESPHome
Implement the new radio_frequency platform on ESPHome, mirroring
the infrared platform pattern.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 20:31:56 -04:00
Paulus Schoutsen
e0d7e3702c Add radio_frequency entity platform
Add a new radio_frequency entity domain that acts as an abstraction
layer between RF transceiver hardware and device-specific integrations,
following the same pattern as the infrared entity platform.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 20:31:30 -04:00
134 changed files with 2847 additions and 3798 deletions

View File

@@ -186,11 +186,15 @@ If `CHANGE_TYPE` IS "Breaking change" or "Deprecation", keep the `## Breaking ch
## Step 10: Push Branch and Create PR
Push the branch with upstream tracking, and create a PR against `home-assistant/core` with the generated title and body:
```bash
# Get branch name and GitHub username
BRANCH=$(git branch --show-current)
PUSH_REMOTE=$(git config "branch.$BRANCH.remote" 2>/dev/null || git remote | head -1)
GITHUB_USER=$(gh api user --jq .login 2>/dev/null || git remote get-url "$PUSH_REMOTE" | sed -E 's#.*[:/]([^/]+)/([^/]+)(\.git)?$#\1#')
# Create PR (gh pr create pushes the branch automatically)
gh pr create --repo home-assistant/core --base dev \
--head "$GITHUB_USER:$BRANCH" \
--draft \
--title "TITLE_HERE" \
--body "$(cat <<'EOF'

View File

@@ -36,6 +36,7 @@ base_platforms: &base_platforms
- homeassistant/components/image_processing/**
- homeassistant/components/infrared/**
- homeassistant/components/lawn_mower/**
- homeassistant/components/radio_frequency/**
- homeassistant/components/light/**
- homeassistant/components/lock/**
- homeassistant/components/media_player/**

44
.github/renovate.json vendored
View File

@@ -78,50 +78,6 @@
"enabled": true,
"labels": ["dependency", "core"]
},
{
"description": "Common Python utilities (allowlisted)",
"matchPackageNames": [
"astral",
"atomicwrites-homeassistant",
"audioop-lts",
"awesomeversion",
"bcrypt",
"ciso8601",
"cronsim",
"defusedxml",
"fnv-hash-fast",
"getmac",
"ical",
"ifaddr",
"lru-dict",
"mutagen",
"propcache",
"pyserial",
"python-slugify",
"PyTurboJPEG",
"securetar",
"standard-aifc",
"standard-telnetlib",
"ulid-transform",
"url-normalize",
"xmltodict"
],
"enabled": true,
"labels": ["dependency"]
},
{
"description": "Home Assistant ecosystem packages (core-maintained, no cooldown)",
"matchPackageNames": [
"hassil",
"home-assistant-bluetooth",
"home-assistant-frontend",
"home-assistant-intents",
"infrared-protocols"
],
"enabled": true,
"minimumReleaseAge": null,
"labels": ["dependency", "core"]
},
{
"description": "Test dependencies (allowlisted)",
"matchPackageNames": [

View File

@@ -21,7 +21,7 @@ jobs:
steps:
- name: Check if integration label was added and extract details
id: extract
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
// Debug: Log the event payload
@@ -118,7 +118,7 @@ jobs:
- name: Fetch similar issues
id: fetch_similar
if: steps.extract.outputs.should_continue == 'true'
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -285,7 +285,7 @@ jobs:
- name: Post duplicate detection results
id: post_results
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}

View File

@@ -21,7 +21,7 @@ jobs:
steps:
- name: Check issue language
id: detect_language
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
ISSUE_NUMBER: ${{ github.event.issue.number }}
ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -95,7 +95,7 @@ jobs:
- name: Process non-English issues
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}

View File

@@ -22,7 +22,7 @@ jobs:
|| github.event.issue.type.name == 'Opportunity'
steps:
- name: Add no-stale label
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
await github.rest.issues.addLabels({
@@ -42,7 +42,7 @@ jobs:
if: github.event.issue.type.name == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
const issueAuthor = context.payload.issue.user.login;

View File

@@ -8,7 +8,7 @@ repos:
- id: ruff-format
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
- repo: https://github.com/codespell-project/codespell
rev: v2.4.2
rev: v2.4.1
hooks:
- id: codespell
args:

16
Dockerfile generated
View File

@@ -19,23 +19,25 @@ ENV \
UV_SYSTEM_PYTHON=true \
UV_NO_CACHE=true
WORKDIR /usr/src
# Home Assistant S6-Overlay
COPY rootfs /
# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:675c318b23c06fd862a61d262240c9a63436b4050d177ffc68a32710d9e05bae /usr/local/bin/go2rtc /bin/go2rtc
RUN \
# Verify go2rtc can be executed
go2rtc --version \
# Install uv
&& pip3 install uv==0.11.1
WORKDIR /usr/src
## Setup Home Assistant Core dependencies
COPY requirements.txt homeassistant/
COPY homeassistant/package_constraints.txt homeassistant/homeassistant/
RUN \
# Verify go2rtc can be executed
go2rtc --version \
# Install uv at the version pinned in the requirements file
&& pip3 install --no-cache-dir "uv==$(awk -F'==' '/^uv==/{print $2}' homeassistant/requirements.txt)" \
&& uv pip install \
uv pip install \
--no-build \
-r homeassistant/requirements.txt

View File

@@ -6,11 +6,10 @@ from typing import Final
from homeassistant.const import STATE_OFF, STATE_ON
CONF_READ_TIMEOUT: Final = "timeout"
CONF_WRITE_TIMEOUT: Final = "write_timeout"
DEFAULT_NAME: Final = "Acer Projector"
DEFAULT_READ_TIMEOUT: Final = 1
DEFAULT_TIMEOUT: Final = 1
DEFAULT_WRITE_TIMEOUT: Final = 1
ECO_MODE: Final = "ECO Mode"

View File

@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/acer_projector",
"iot_class": "local_polling",
"quality_scale": "legacy",
"requirements": ["serialx==1.2.2"]
"requirements": ["pyserial==3.5"]
}

View File

@@ -6,7 +6,7 @@ import logging
import re
from typing import Any
from serialx import Serial, SerialException
import serial
import voluptuous as vol
from homeassistant.components.switch import (
@@ -16,22 +16,21 @@ from homeassistant.components.switch import (
from homeassistant.const import (
CONF_FILENAME,
CONF_NAME,
CONF_TIMEOUT,
STATE_OFF,
STATE_ON,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import (
CMD_DICT,
CONF_READ_TIMEOUT,
CONF_WRITE_TIMEOUT,
DEFAULT_NAME,
DEFAULT_READ_TIMEOUT,
DEFAULT_TIMEOUT,
DEFAULT_WRITE_TIMEOUT,
ECO_MODE,
ICON,
@@ -46,7 +45,7 @@ PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_FILENAME): cv.isdevice,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_READ_TIMEOUT, default=DEFAULT_READ_TIMEOUT): cv.positive_int,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(
CONF_WRITE_TIMEOUT, default=DEFAULT_WRITE_TIMEOUT
): cv.positive_int,
@@ -63,10 +62,10 @@ def setup_platform(
"""Connect with serial port and return Acer Projector."""
serial_port = config[CONF_FILENAME]
name = config[CONF_NAME]
read_timeout = config[CONF_READ_TIMEOUT]
timeout = config[CONF_TIMEOUT]
write_timeout = config[CONF_WRITE_TIMEOUT]
add_entities([AcerSwitch(serial_port, name, read_timeout, write_timeout)], True)
add_entities([AcerSwitch(serial_port, name, timeout, write_timeout)], True)
class AcerSwitch(SwitchEntity):
@@ -78,14 +77,14 @@ class AcerSwitch(SwitchEntity):
self,
serial_port: str,
name: str,
read_timeout: int,
timeout: int,
write_timeout: int,
) -> None:
"""Init of the Acer projector."""
self.serial = serial.Serial(
port=serial_port, timeout=timeout, write_timeout=write_timeout
)
self._serial_port = serial_port
self._read_timeout = read_timeout
self._write_timeout = write_timeout
self._attr_name = name
self._attributes = {
LAMP_HOURS: STATE_UNKNOWN,
@@ -95,26 +94,22 @@ class AcerSwitch(SwitchEntity):
def _write_read(self, msg: str) -> str:
"""Write to the projector and read the return."""
ret = ""
# Sometimes the projector won't answer for no reason or the projector
# was disconnected during runtime.
# This way the projector can be reconnected and will still work
try:
with Serial.from_url(
self._serial_port,
read_timeout=self._read_timeout,
write_timeout=self._write_timeout,
) as serial:
serial.write(msg.encode("utf-8"))
# Size is an experience value there is no real limit.
# AFAIK there is no limit and no end character so we will usually
# need to wait for timeout
return serial.read_until(size=20).decode("utf-8")
except (OSError, SerialException, TimeoutError) as exc:
raise HomeAssistantError(
f"Problem communicating with {self._serial_port}"
) from exc
if not self.serial.is_open:
self.serial.open()
self.serial.write(msg.encode("utf-8"))
# Size is an experience value there is no real limit.
# AFAIK there is no limit and no end character so we will usually
# need to wait for timeout
ret = self.serial.read_until(size=20).decode("utf-8")
except serial.SerialException:
_LOGGER.error("Problem communicating with %s", self._serial_port)
self.serial.close()
return ret
def _write_read_format(self, msg: str) -> str:
"""Write msg, obtain answer and format output."""

View File

@@ -152,7 +152,6 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
"text",
"timer",
"todo",
"update",
"vacuum",
"valve",
"water_heater",

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/camera",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["PyTurboJPEG==1.8.3"]
"requirements": ["PyTurboJPEG==1.8.0"]
}

View File

@@ -35,6 +35,7 @@ from aioesphomeapi import (
MediaPlayerInfo,
MediaPlayerSupportedFormat,
NumberInfo,
RadioFrequencyInfo,
SelectInfo,
SensorInfo,
SensorState,
@@ -88,6 +89,7 @@ INFO_TYPE_TO_PLATFORM: dict[type[EntityInfo], Platform] = {
FanInfo: Platform.FAN,
InfraredInfo: Platform.INFRARED,
LightInfo: Platform.LIGHT,
RadioFrequencyInfo: Platform.RADIO_FREQUENCY,
LockInfo: Platform.LOCK,
MediaPlayerInfo: Platform.MEDIA_PLAYER,
NumberInfo: Platform.NUMBER,

View File

@@ -0,0 +1,79 @@
"""Radio Frequency platform for ESPHome."""
from __future__ import annotations
from functools import partial
import logging
from aioesphomeapi import (
EntityState,
RadioFrequencyCapability,
RadioFrequencyInfo,
RadioFrequencyModulation,
)
from rf_protocols import ModulationType, RadioFrequencyCommand
from homeassistant.components.radio_frequency import RadioFrequencyTransmitterEntity
from homeassistant.core import callback
from .entity import (
EsphomeEntity,
convert_api_error_ha_error,
platform_async_setup_entry,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
MODULATION_TYPE_TO_ESPHOME: dict[ModulationType, RadioFrequencyModulation] = {
ModulationType.OOK: RadioFrequencyModulation.OOK,
}
class EsphomeRadioFrequencyEntity(
EsphomeEntity[RadioFrequencyInfo, EntityState], RadioFrequencyTransmitterEntity
):
"""ESPHome radio frequency entity using native API."""
@property
def supported_frequency_ranges(self) -> list[tuple[int, int]]:
"""Return supported frequency ranges from device info."""
return [(self._static_info.frequency_min, self._static_info.frequency_max)]
@callback
def _on_device_update(self) -> None:
"""Call when device updates or entry data changes."""
super()._on_device_update()
if self._entry_data.available:
self.async_write_ha_state()
@convert_api_error_ha_error
async def async_send_command(self, command: RadioFrequencyCommand) -> None:
"""Send an RF command."""
timings = [
interval
for timing in command.get_raw_timings()
for interval in (timing.high_us, -timing.low_us)
]
_LOGGER.debug("Sending RF command: %s", timings)
self._client.radio_frequency_transmit_raw_timings(
self._static_info.key,
frequency=command.frequency,
timings=timings,
modulation=MODULATION_TYPE_TO_ESPHOME[command.modulation],
repeat_count=command.repeat_count + 1,
device_id=self._static_info.device_id,
)
async_setup_entry = partial(
platform_async_setup_entry,
info_type=RadioFrequencyInfo,
entity_type=EsphomeRadioFrequencyEntity,
state_type=EntityState,
info_filter=lambda info: bool(
info.capabilities & RadioFrequencyCapability.TRANSMITTER
),
)

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from typing import Any
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from .const import DOMAIN, EVENT_HDMI_CEC_UNAVAILABLE
@@ -56,10 +55,9 @@ class CecEntity(Entity):
else:
self._attr_name = f"{self._device.type_name} {self._logical_address} ({self._device.osd_name})"
@callback
def _hdmi_cec_unavailable(self, callback_event):
self._attr_available = False
self.async_write_ha_state()
self.schedule_update_ha_state(False)
async def async_added_to_hass(self) -> None:
"""Register HDMI callbacks after initialization."""

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
import logging
from typing import Any
from pycec.commands import CecCommand, KeyPressCommand, KeyReleaseCommand
from pycec.const import (
@@ -30,6 +31,7 @@ from homeassistant.components.media_player import (
MediaPlayerEntity,
MediaPlayerEntityFeature,
MediaPlayerState,
MediaType,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -43,20 +45,20 @@ _LOGGER = logging.getLogger(__name__)
ENTITY_ID_FORMAT = MP_DOMAIN + ".{}"
async def async_setup_platform(
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Find and return HDMI devices as media players."""
"""Find and return HDMI devices as +switches."""
if discovery_info and ATTR_NEW in discovery_info:
_LOGGER.debug("Setting up HDMI devices %s", discovery_info[ATTR_NEW])
entities = []
for device in discovery_info[ATTR_NEW]:
hdmi_device = hass.data[DOMAIN][device]
entities.append(CecPlayerEntity(hdmi_device, hdmi_device.logical_address))
async_add_entities(entities, True)
add_entities(entities, True)
class CecPlayerEntity(CecEntity, MediaPlayerEntity):
@@ -77,61 +79,78 @@ class CecPlayerEntity(CecEntity, MediaPlayerEntity):
def send_playback(self, key):
"""Send playback status to CEC adapter."""
self._device.send_command(CecCommand(key, dst=self._logical_address))
self._device.async_send_command(CecCommand(key, dst=self._logical_address))
async def async_mute_volume(self, mute: bool) -> None:
def mute_volume(self, mute: bool) -> None:
"""Mute volume."""
self.send_keypress(KEY_MUTE_TOGGLE)
async def async_media_previous_track(self) -> None:
def media_previous_track(self) -> None:
"""Go to previous track."""
self.send_keypress(KEY_BACKWARD)
async def async_turn_on(self) -> None:
def turn_on(self) -> None:
"""Turn device on."""
self._device.turn_on()
self._attr_state = MediaPlayerState.ON
self.async_write_ha_state()
async def async_turn_off(self) -> None:
def clear_playlist(self) -> None:
"""Clear players playlist."""
raise NotImplementedError
def turn_off(self) -> None:
"""Turn device off."""
self._device.turn_off()
self._attr_state = MediaPlayerState.OFF
self.async_write_ha_state()
async def async_media_stop(self) -> None:
def media_stop(self) -> None:
"""Stop playback."""
self.send_keypress(KEY_STOP)
self._attr_state = MediaPlayerState.IDLE
self.async_write_ha_state()
async def async_media_next_track(self) -> None:
def play_media(
self, media_type: MediaType | str, media_id: str, **kwargs: Any
) -> None:
"""Not supported."""
raise NotImplementedError
def media_next_track(self) -> None:
"""Skip to next track."""
self.send_keypress(KEY_FORWARD)
async def async_media_pause(self) -> None:
def media_seek(self, position: float) -> None:
"""Not supported."""
raise NotImplementedError
def set_volume_level(self, volume: float) -> None:
"""Set volume level, range 0..1."""
raise NotImplementedError
def media_pause(self) -> None:
"""Pause playback."""
self.send_keypress(KEY_PAUSE)
self._attr_state = MediaPlayerState.PAUSED
self.async_write_ha_state()
async def async_media_play(self) -> None:
def select_source(self, source: str) -> None:
"""Not supported."""
raise NotImplementedError
def media_play(self) -> None:
"""Start playback."""
self.send_keypress(KEY_PLAY)
self._attr_state = MediaPlayerState.PLAYING
self.async_write_ha_state()
async def async_volume_up(self) -> None:
def volume_up(self) -> None:
"""Increase volume."""
_LOGGER.debug("%s: volume up", self._logical_address)
self.send_keypress(KEY_VOLUME_UP)
async def async_volume_down(self) -> None:
def volume_down(self) -> None:
"""Decrease volume."""
_LOGGER.debug("%s: volume down", self._logical_address)
self.send_keypress(KEY_VOLUME_DOWN)
async def async_update(self) -> None:
def update(self) -> None:
"""Update device status."""
device = self._device
if device.power_status in [POWER_OFF, 3]:

View File

@@ -20,10 +20,10 @@ _LOGGER = logging.getLogger(__name__)
ENTITY_ID_FORMAT = SWITCH_DOMAIN + ".{}"
async def async_setup_platform(
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Find and return HDMI devices as switches."""
@@ -33,7 +33,7 @@ async def async_setup_platform(
for device in discovery_info[ATTR_NEW]:
hdmi_device = hass.data[DOMAIN][device]
entities.append(CecSwitchEntity(hdmi_device, hdmi_device.logical_address))
async_add_entities(entities, True)
add_entities(entities, True)
class CecSwitchEntity(CecEntity, SwitchEntity):
@@ -44,19 +44,19 @@ class CecSwitchEntity(CecEntity, SwitchEntity):
CecEntity.__init__(self, device, logical)
self.entity_id = f"{SWITCH_DOMAIN}.hdmi_{hex(self._logical_address)[2:]}"
async def async_turn_on(self, **kwargs: Any) -> None:
def turn_on(self, **kwargs: Any) -> None:
"""Turn device on."""
self._device.turn_on()
self._attr_is_on = True
self.async_write_ha_state()
self.schedule_update_ha_state(force_refresh=False)
async def async_turn_off(self, **kwargs: Any) -> None:
def turn_off(self, **kwargs: Any) -> None:
"""Turn device off."""
self._device.turn_off()
self._attr_is_on = False
self.async_write_ha_state()
self.schedule_update_ha_state(force_refresh=False)
async def async_update(self) -> None:
def update(self) -> None:
"""Update device status."""
device = self._device
if device.power_status in {POWER_OFF, 3}:

View File

@@ -10,7 +10,7 @@
"loggers": ["pyhap"],
"requirements": [
"HAP-python==5.0.0",
"fnv-hash-fast==2.0.2",
"fnv-hash-fast==2.0.0",
"homekit-audio-proxy==1.2.1",
"PyQRCode==1.2.1",
"base36==0.1.1"

View File

@@ -75,6 +75,7 @@ from .const import ( # noqa: F401
ATTR_GROUP_MEMBERS,
ATTR_INPUT_SOURCE,
ATTR_INPUT_SOURCE_LIST,
ATTR_LAST_NON_BUFFERING_STATE,
ATTR_MEDIA_ALBUM_ARTIST,
ATTR_MEDIA_ALBUM_NAME,
ATTR_MEDIA_ANNOUNCE,
@@ -587,6 +588,8 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
_attr_volume_level: float | None = None
_attr_volume_step: float
__last_non_buffering_state: MediaPlayerState | None = None
# Implement these for your media player
@cached_property
def device_class(self) -> MediaPlayerDeviceClass | None:
@@ -1124,7 +1127,12 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
@property
def state_attributes(self) -> dict[str, Any]:
"""Return the state attributes."""
state_attr: dict[str, Any] = {}
if (state := self.state) != MediaPlayerState.BUFFERING:
self.__last_non_buffering_state = state
state_attr: dict[str, Any] = {
ATTR_LAST_NON_BUFFERING_STATE: self.__last_non_buffering_state
}
if self.support_grouping:
state_attr[ATTR_GROUP_MEMBERS] = self.group_members

View File

@@ -13,6 +13,7 @@ ATTR_ENTITY_PICTURE_LOCAL = "entity_picture_local"
ATTR_GROUP_MEMBERS = "group_members"
ATTR_INPUT_SOURCE = "source"
ATTR_INPUT_SOURCE_LIST = "source_list"
ATTR_LAST_NON_BUFFERING_STATE = "last_non_buffering_state"
ATTR_MEDIA_ANNOUNCE = "announce"
ATTR_MEDIA_ALBUM_ARTIST = "media_album_artist"
ATTR_MEDIA_ALBUM_NAME = "media_album_name"

View File

@@ -123,20 +123,8 @@
}
},
"triggers": {
"paused_playing": {
"trigger": "mdi:pause"
},
"started_playing": {
"trigger": "mdi:play"
},
"stopped_playing": {
"trigger": "mdi:stop"
},
"turned_off": {
"trigger": "mdi:power"
},
"turned_on": {
"trigger": "mdi:power"
}
}
}

View File

@@ -433,50 +433,14 @@
},
"title": "Media player",
"triggers": {
"paused_playing": {
"description": "Triggers after one or more media players pause playing.",
"fields": {
"behavior": {
"name": "[%key:component::media_player::common::trigger_behavior_name%]"
}
},
"name": "Media player paused playing"
},
"started_playing": {
"description": "Triggers after one or more media players start playing.",
"fields": {
"behavior": {
"name": "[%key:component::media_player::common::trigger_behavior_name%]"
}
},
"name": "Media player started playing"
},
"stopped_playing": {
"description": "Triggers after one or more media players stop playing.",
"description": "Triggers after one or more media players stop playing media.",
"fields": {
"behavior": {
"name": "[%key:component::media_player::common::trigger_behavior_name%]"
}
},
"name": "Media player stopped playing"
},
"turned_off": {
"description": "Triggers after one or more media players turn off.",
"fields": {
"behavior": {
"name": "[%key:component::media_player::common::trigger_behavior_name%]"
}
},
"name": "Media player turned off"
},
"turned_on": {
"description": "Triggers after one or more media players turn on.",
"fields": {
"behavior": {
"name": "[%key:component::media_player::common::trigger_behavior_name%]"
}
},
"name": "Media player turned on"
}
}
}

View File

@@ -7,29 +7,6 @@ from . import MediaPlayerState
from .const import DOMAIN
TRIGGERS: dict[str, type[Trigger]] = {
"paused_playing": make_entity_transition_trigger(
DOMAIN,
from_states={
MediaPlayerState.BUFFERING,
MediaPlayerState.PLAYING,
},
to_states={
MediaPlayerState.PAUSED,
},
),
"started_playing": make_entity_transition_trigger(
DOMAIN,
from_states={
MediaPlayerState.IDLE,
MediaPlayerState.OFF,
MediaPlayerState.ON,
MediaPlayerState.PAUSED,
},
to_states={
MediaPlayerState.BUFFERING,
MediaPlayerState.PLAYING,
},
),
"stopped_playing": make_entity_transition_trigger(
DOMAIN,
from_states={
@@ -43,32 +20,6 @@ TRIGGERS: dict[str, type[Trigger]] = {
MediaPlayerState.ON,
},
),
"turned_off": make_entity_transition_trigger(
DOMAIN,
from_states={
MediaPlayerState.BUFFERING,
MediaPlayerState.IDLE,
MediaPlayerState.ON,
MediaPlayerState.PAUSED,
MediaPlayerState.PLAYING,
},
to_states={
MediaPlayerState.OFF,
},
),
"turned_on": make_entity_transition_trigger(
DOMAIN,
from_states={
MediaPlayerState.OFF,
},
to_states={
MediaPlayerState.BUFFERING,
MediaPlayerState.IDLE,
MediaPlayerState.ON,
MediaPlayerState.PAUSED,
MediaPlayerState.PLAYING,
},
),
}

View File

@@ -1,4 +1,4 @@
.trigger_common: &trigger_common
stopped_playing:
target:
entity:
domain: media_player
@@ -13,9 +13,3 @@
- first
- last
- any
paused_playing: *trigger_common
started_playing: *trigger_common
stopped_playing: *trigger_common
turned_off: *trigger_common
turned_on: *trigger_common

View File

@@ -1,6 +1,5 @@
"""Device tracker for Mobile app."""
from collections.abc import Callable
from typing import Any
from homeassistant.components.device_tracker import (
@@ -54,11 +53,11 @@ async def async_setup_entry(
class MobileAppEntity(TrackerEntity, RestoreEntity):
"""Represent a tracked device."""
def __init__(self, entry: ConfigEntry) -> None:
def __init__(self, entry, data=None):
"""Set up Mobile app entity."""
self._entry = entry
self._data: dict[str, Any] = {}
self._dispatch_unsub: Callable[[], None] | None = None
self._data = data
self._dispatch_unsub = None
@property
def unique_id(self) -> str:
@@ -133,7 +132,12 @@ class MobileAppEntity(TrackerEntity, RestoreEntity):
self.update_data,
)
# Don't restore if we got set up with data.
if self._data is not None:
return
if (state := await self.async_get_last_state()) is None:
self._data = {}
return
attr = state.attributes
@@ -154,7 +158,7 @@ class MobileAppEntity(TrackerEntity, RestoreEntity):
self._dispatch_unsub = None
@callback
def update_data(self, data: dict[str, Any]) -> None:
def update_data(self, data):
"""Mark the device as seen."""
self._data = data
self.async_write_ha_state()

View File

@@ -1,8 +1,7 @@
{
"common": {
"condition_behavior_name": "Condition passes if",
"trigger_behavior_name": "Trigger when",
"trigger_for_name": "For at least"
"trigger_behavior_name": "Trigger when"
},
"conditions": {
"is_detected": {
@@ -46,9 +45,6 @@
"fields": {
"behavior": {
"name": "[%key:component::motion::common::trigger_behavior_name%]"
},
"for": {
"name": "[%key:component::motion::common::trigger_for_name%]"
}
},
"name": "Motion cleared"
@@ -58,9 +54,6 @@
"fields": {
"behavior": {
"name": "[%key:component::motion::common::trigger_behavior_name%]"
},
"for": {
"name": "[%key:component::motion::common::trigger_for_name%]"
}
},
"name": "Motion detected"

View File

@@ -9,11 +9,6 @@
- first
- last
- any
for:
required: true
default: 00:00:00
selector:
duration:
detected:
fields: *trigger_common_fields

View File

@@ -54,7 +54,7 @@ class MotionMountErrorStatusSensor(MotionMountEntity, SensorEntity):
def __init__(
self, mm: motionmount.MotionMount, config_entry: MotionMountConfigEntry
) -> None:
"""Initialize sensor entity."""
"""Initialize sensor entiry."""
super().__init__(mm, config_entry)
self._attr_unique_id = f"{self._base_unique_id}-error-status"

View File

@@ -0,0 +1,189 @@
"""Provides functionality to interact with radio frequency devices."""
from __future__ import annotations
from abc import abstractmethod
from datetime import timedelta
import logging
from typing import final
from rf_protocols import ModulationType, RadioFrequencyCommand
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from homeassistant.util.hass_dict import HassKey
from .const import DOMAIN
__all__ = [
"DOMAIN",
"ModulationType",
"RadioFrequencyTransmitterEntity",
"RadioFrequencyTransmitterEntityDescription",
"async_get_transmitters",
"async_send_command",
]
_LOGGER = logging.getLogger(__name__)
DATA_COMPONENT: HassKey[EntityComponent[RadioFrequencyTransmitterEntity]] = HassKey(
DOMAIN
)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE
SCAN_INTERVAL = timedelta(seconds=30)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the radio_frequency domain."""
component = hass.data[DATA_COMPONENT] = EntityComponent[
RadioFrequencyTransmitterEntity
](_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
await component.async_setup(config)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
@callback
def async_get_transmitters(
hass: HomeAssistant,
frequency: int,
modulation: ModulationType,
) -> list[str]:
"""Get entity IDs of all RF transmitters supporting the given frequency.
An empty list means no compatible transmitters.
Raises:
HomeAssistantError: If no transmitters exist.
"""
component = hass.data.get(DATA_COMPONENT)
if component is None:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="component_not_loaded",
)
entities = list(component.entities)
if not entities:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="no_transmitters",
)
return [
entity.entity_id
for entity in entities
if any(
low <= frequency <= high for low, high in entity.supported_frequency_ranges
)
]
async def async_send_command(
hass: HomeAssistant,
entity_id_or_uuid: str,
command: RadioFrequencyCommand,
context: Context | None = None,
) -> None:
"""Send an RF command to the specified radio_frequency entity.
Raises:
HomeAssistantError: If the radio_frequency entity is not found.
"""
component = hass.data.get(DATA_COMPONENT)
if component is None:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="component_not_loaded",
)
ent_reg = er.async_get(hass)
entity_id = er.async_validate_entity_id(ent_reg, entity_id_or_uuid)
entity = component.get_entity(entity_id)
if entity is None:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="entity_not_found",
translation_placeholders={"entity_id": entity_id},
)
if context is not None:
entity.async_set_context(context)
await entity.async_send_command_internal(command)
class RadioFrequencyTransmitterEntityDescription(
EntityDescription, frozen_or_thawed=True
):
"""Describes radio frequency transmitter entities."""
class RadioFrequencyTransmitterEntity(RestoreEntity):
    """Base class for radio frequency transmitter entities.

    Subclasses implement :meth:`async_send_command` and report their
    hardware capabilities via :attr:`supported_frequency_ranges`. The
    entity state is the ISO timestamp of the most recently sent command,
    restored across restarts via ``RestoreEntity``.
    """

    entity_description: RadioFrequencyTransmitterEntityDescription
    _attr_should_poll = False
    _attr_state: None = None

    # ISO timestamp of the last transmitted command; None until one is sent.
    __last_command_sent: str | None = None

    @property
    def supported_frequency_ranges(self) -> list[tuple[int, int]]:
        """Return list of (min_hz, max_hz) tuples."""
        raise NotImplementedError

    @property
    @final
    def state(self) -> str | None:
        """Return the entity state."""
        return self.__last_command_sent

    @final
    async def async_send_command_internal(self, command: RadioFrequencyCommand) -> None:
        """Send an RF command and update state.

        Should not be overridden, handles setting last sent timestamp.
        """
        await self.async_send_command(command)
        # Record the send time (millisecond precision) and publish it.
        self.__last_command_sent = dt_util.utcnow().isoformat(timespec="milliseconds")
        self.async_write_ha_state()

    @final
    async def async_internal_added_to_hass(self) -> None:
        """Call when the radio frequency entity is added to hass."""
        await super().async_internal_added_to_hass()
        # Restore the last-sent timestamp from the previous run, if usable.
        if (
            (last := await self.async_get_last_state()) is not None
            and last.state not in (STATE_UNAVAILABLE, None)
        ):
            self.__last_command_sent = last.state

    @abstractmethod
    async def async_send_command(self, command: RadioFrequencyCommand) -> None:
        """Send an RF command.

        Args:
            command: The RF command to send.

        Raises:
            HomeAssistantError: If transmission fails.
        """

View File

@@ -0,0 +1,5 @@
"""Constants for the Radio Frequency integration."""
from typing import Final
DOMAIN: Final = "radio_frequency"

View File

@@ -0,0 +1,7 @@
{
"entity_component": {
"_": {
"default": "mdi:radio-tower"
}
}
}

View File

@@ -0,0 +1,9 @@
{
"domain": "radio_frequency",
"name": "Radio Frequency",
"codeowners": ["@home-assistant/core"],
"documentation": "https://www.home-assistant.io/integrations/radio_frequency",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["rf-protocols==0.0.1"]
}

View File

@@ -0,0 +1,13 @@
{
"exceptions": {
"component_not_loaded": {
"message": "Radio Frequency component not loaded"
},
"entity_not_found": {
"message": "Radio Frequency entity `{entity_id}` not found"
},
"no_transmitters": {
"message": "No Radio Frequency transmitters available"
}
}
}

View File

@@ -0,0 +1,13 @@
{
"exceptions": {
"component_not_loaded": {
"message": "Radio Frequency component not loaded"
},
"entity_not_found": {
"message": "Radio Frequency entity `{entity_id}` not found"
},
"no_transmitters": {
"message": "No Radio Frequency transmitters available"
}
}
}

View File

@@ -192,7 +192,7 @@ ID_TYPE = BigInteger().with_variant(sqlite.INTEGER, "sqlite")
# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32
# for sqlite and postgresql we use a bigint
UINT_32_TYPE = BigInteger().with_variant(
mysql.INTEGER(unsigned=True),
mysql.INTEGER(unsigned=True), # type: ignore[no-untyped-call]
"mysql",
"mariadb",
)
@@ -206,12 +206,12 @@ JSONB_VARIANT_CAST = Text().with_variant(
)
DATETIME_TYPE = (
DateTime(timezone=True)
.with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb")
.with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb") # type: ignore[no-untyped-call]
.with_variant(FAST_PYSQLITE_DATETIME(), "sqlite") # type: ignore[no-untyped-call]
)
DOUBLE_TYPE = (
Float()
.with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb")
.with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb") # type: ignore[no-untyped-call]
.with_variant(oracle.DOUBLE_PRECISION(), "oracle")
.with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
)

View File

@@ -7,8 +7,8 @@
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": [
"SQLAlchemy==2.0.49",
"fnv-hash-fast==2.0.2",
"SQLAlchemy==2.0.41",
"fnv-hash-fast==2.0.0",
"psutil-home-assistant==0.0.1"
]
}

View File

@@ -447,10 +447,10 @@ def setup_connection_for_dialect(
slow_dependent_subquery = False
if dialect_name == SupportedDialect.SQLITE:
if first_connection:
old_isolation = dbapi_connection.isolation_level
dbapi_connection.isolation_level = None
old_isolation = dbapi_connection.isolation_level # type: ignore[attr-defined]
dbapi_connection.isolation_level = None # type: ignore[attr-defined]
execute_on_connection(dbapi_connection, "PRAGMA journal_mode=WAL")
dbapi_connection.isolation_level = old_isolation
dbapi_connection.isolation_level = old_isolation # type: ignore[attr-defined]
# WAL mode only needs to be setup once
# instead of every time we open the sqlite connection
# as its persistent and isn't free to call every time.

View File

@@ -4,40 +4,27 @@ from __future__ import annotations
import asyncio
from collections.abc import Coroutine
from copy import deepcopy
from datetime import timedelta
import logging
from types import MappingProxyType
from typing import Any
import voluptuous as vol
from homeassistant.components.rest import RESOURCE_SCHEMA, create_rest_data_from_config
from homeassistant.components.sensor import CONF_STATE_CLASS, DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_ATTRIBUTE,
CONF_AUTHENTICATION,
CONF_DEVICE_CLASS,
CONF_HEADERS,
CONF_NAME,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_TIMEOUT,
CONF_UNIQUE_ID,
CONF_UNIT_OF_MEASUREMENT,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
CONF_VERIFY_SSL,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
discovery,
entity_registry as er,
)
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.trigger_template_entity import (
CONF_AVAILABILITY,
TEMPLATE_SENSOR_BASE_SCHEMA,
@@ -45,22 +32,11 @@ from homeassistant.helpers.trigger_template_entity import (
)
from homeassistant.helpers.typing import ConfigType
from .const import (
CONF_ADVANCED,
CONF_AUTH,
CONF_ENCODING,
CONF_INDEX,
CONF_SELECT,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
PLATFORMS,
)
from .const import CONF_INDEX, CONF_SELECT, DEFAULT_SCAN_INTERVAL, DOMAIN, PLATFORMS
from .coordinator import ScrapeCoordinator
type ScrapeConfigEntry = ConfigEntry[ScrapeCoordinator]
_LOGGER = logging.getLogger(__name__)
SENSOR_SCHEMA = vol.Schema(
{
**TEMPLATE_SENSOR_BASE_SCHEMA.schema,
@@ -127,13 +103,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ScrapeConfigEntry) -> bool:
"""Set up Scrape from a config entry."""
config: dict[str, Any] = dict(entry.options)
# Config flow uses sections but the COMBINED SCHEMA does not
# so we need to flatten the config here
config.update(config.pop(CONF_ADVANCED, {}))
config.update(config.pop(CONF_AUTH, {}))
rest_config: dict[str, Any] = COMBINED_SCHEMA(dict(config))
rest_config: dict[str, Any] = COMBINED_SCHEMA(dict(entry.options))
rest = create_rest_data_from_config(hass, rest_config)
coordinator = ScrapeCoordinator(
@@ -147,159 +117,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ScrapeConfigEntry) -> bo
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
async def async_migrate_entry(hass: HomeAssistant, entry: ScrapeConfigEntry) -> bool:
"""Migrate old entry."""
if entry.version > 2:
# Don't migrate from future version
return False
if entry.version == 1:
old_to_new_sensor_id = {}
for sensor_config in entry.options[SENSOR_DOMAIN]:
# Create a new sub config entry per sensor
title = sensor_config[CONF_NAME]
old_unique_id = sensor_config[CONF_UNIQUE_ID]
subentry_config = {
CONF_INDEX: sensor_config[CONF_INDEX],
CONF_SELECT: sensor_config[CONF_SELECT],
CONF_ADVANCED: {},
}
for sensor_advanced_key in (
CONF_ATTRIBUTE,
CONF_VALUE_TEMPLATE,
CONF_AVAILABILITY,
CONF_DEVICE_CLASS,
CONF_STATE_CLASS,
CONF_UNIT_OF_MEASUREMENT,
):
if sensor_advanced_key not in sensor_config:
continue
subentry_config[CONF_ADVANCED][sensor_advanced_key] = sensor_config[
sensor_advanced_key
]
new_sub_entry = ConfigSubentry(
data=MappingProxyType(subentry_config),
subentry_type="entity",
title=title,
unique_id=None,
)
_LOGGER.debug(
"Migrating sensor %s with unique id %s to sub config entry id %s, old data %s, new data %s",
title,
old_unique_id,
new_sub_entry.subentry_id,
sensor_config,
subentry_config,
)
old_to_new_sensor_id[old_unique_id] = new_sub_entry.subentry_id
hass.config_entries.async_add_subentry(entry, new_sub_entry)
# Use the new sub config entry id as the unique id for the sensor entity
entity_reg = er.async_get(hass)
entities = er.async_entries_for_config_entry(entity_reg, entry.entry_id)
for entity in entities:
if (old_unique_id := entity.unique_id) in old_to_new_sensor_id:
new_unique_id = old_to_new_sensor_id[old_unique_id]
_LOGGER.debug(
"Migrating entity %s with unique id %s to new unique id %s",
entity.entity_id,
entity.unique_id,
new_unique_id,
)
entity_reg.async_update_entity(
entity.entity_id,
config_entry_id=entry.entry_id,
config_subentry_id=new_unique_id,
new_unique_id=new_unique_id,
)
# Use the new sub config entry id as the identifier for the sensor device
device_reg = dr.async_get(hass)
devices = dr.async_entries_for_config_entry(device_reg, entry.entry_id)
for device in devices:
for domain, identifier in device.identifiers:
if domain != DOMAIN or identifier not in old_to_new_sensor_id:
continue
subentry_id = old_to_new_sensor_id[identifier]
new_identifiers = deepcopy(device.identifiers)
new_identifiers.remove((domain, identifier))
new_identifiers.add((domain, old_to_new_sensor_id[identifier]))
_LOGGER.debug(
"Migrating device %s with identifiers %s to new identifiers %s",
device.id,
device.identifiers,
new_identifiers,
)
device_reg.async_update_device(
device.id,
add_config_entry_id=entry.entry_id,
add_config_subentry_id=subentry_id,
new_identifiers=new_identifiers,
)
# Removing None from the list of subentries if existing
# as the device should only belong to the subentry
# and not the main config entry
device_reg.async_update_device(
device.id,
remove_config_entry_id=entry.entry_id,
remove_config_subentry_id=None,
)
# Update the resource config
new_config_entry_data = dict(entry.options)
new_config_entry_data[CONF_AUTH] = {}
new_config_entry_data[CONF_ADVANCED] = {}
new_config_entry_data.pop(SENSOR_DOMAIN, None)
for resource_advanced_key in (
CONF_HEADERS,
CONF_VERIFY_SSL,
CONF_TIMEOUT,
CONF_ENCODING,
):
if resource_advanced_key in new_config_entry_data:
new_config_entry_data[CONF_ADVANCED][resource_advanced_key] = (
new_config_entry_data.pop(resource_advanced_key)
)
for resource_auth_key in (CONF_AUTHENTICATION, CONF_USERNAME, CONF_PASSWORD):
if resource_auth_key in new_config_entry_data:
new_config_entry_data[CONF_AUTH][resource_auth_key] = (
new_config_entry_data.pop(resource_auth_key)
)
_LOGGER.debug(
"Migrating config entry %s from version 1 to version 2 with data %s",
entry.entry_id,
new_config_entry_data,
)
hass.config_entries.async_update_entry(
entry, version=2, options=new_config_entry_data
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ScrapeConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Scrape config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ScrapeConfigEntry) -> None:
"""Handle config entry update."""
hass.config_entries.async_schedule_reload(entry.entry_id)
async def async_remove_config_entry_device(
hass: HomeAssistant, entry: ConfigEntry, device: dr.DeviceEntry
hass: HomeAssistant, entry: ConfigEntry, device: DeviceEntry
) -> bool:
"""Remove Scrape config entry from a device."""
entity_registry = er.async_get(hass)

View File

@@ -2,13 +2,12 @@
from __future__ import annotations
from copy import deepcopy
import logging
from typing import Any
from collections.abc import Mapping
from typing import Any, cast
import uuid
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.rest import create_rest_data_from_config
from homeassistant.components.rest.data import ( # pylint: disable=hass-component-root-import
DEFAULT_TIMEOUT,
@@ -19,17 +18,10 @@ from homeassistant.components.rest.schema import ( # pylint: disable=hass-compo
)
from homeassistant.components.sensor import (
CONF_STATE_CLASS,
DOMAIN as SENSOR_DOMAIN,
SensorDeviceClass,
SensorStateClass,
)
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryFlow,
OptionsFlow,
SubentryFlowResult,
)
from homeassistant.const import (
CONF_ATTRIBUTE,
CONF_AUTHENTICATION,
@@ -41,6 +33,7 @@ from homeassistant.const import (
CONF_PAYLOAD,
CONF_RESOURCE,
CONF_TIMEOUT,
CONF_UNIQUE_ID,
CONF_UNIT_OF_MEASUREMENT,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
@@ -49,7 +42,15 @@ from homeassistant.const import (
HTTP_DIGEST_AUTHENTICATION,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import async_get_hass
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.schema_config_entry_flow import (
SchemaCommonFlowHandler,
SchemaConfigFlowHandler,
SchemaFlowError,
SchemaFlowFormStep,
SchemaFlowMenuStep,
)
from homeassistant.helpers.selector import (
BooleanSelector,
NumberSelector,
@@ -68,8 +69,6 @@ from homeassistant.helpers.trigger_template_entity import CONF_AVAILABILITY
from . import COMBINED_SCHEMA
from .const import (
CONF_ADVANCED,
CONF_AUTH,
CONF_ENCODING,
CONF_INDEX,
CONF_SELECT,
@@ -79,212 +78,243 @@ from .const import (
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
RESOURCE_SETUP = {
vol.Required(CONF_RESOURCE): TextSelector(
TextSelectorConfig(type=TextSelectorType.URL)
),
vol.Optional(CONF_METHOD, default=DEFAULT_METHOD): SelectSelector(
SelectSelectorConfig(options=METHODS, mode=SelectSelectorMode.DROPDOWN)
),
vol.Optional(CONF_PAYLOAD): ObjectSelector(),
vol.Optional(CONF_AUTHENTICATION): SelectSelector(
SelectSelectorConfig(
options=[HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION],
mode=SelectSelectorMode.DROPDOWN,
)
),
vol.Optional(CONF_USERNAME): TextSelector(),
vol.Optional(CONF_PASSWORD): TextSelector(
TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
vol.Optional(CONF_HEADERS): ObjectSelector(),
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): BooleanSelector(),
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): NumberSelector(
NumberSelectorConfig(min=0, step=1, mode=NumberSelectorMode.BOX)
),
vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): TextSelector(),
}
RESOURCE_SETUP = vol.Schema(
{
vol.Required(CONF_RESOURCE): TextSelector(
TextSelectorConfig(type=TextSelectorType.URL)
),
vol.Optional(CONF_METHOD, default=DEFAULT_METHOD): SelectSelector(
SelectSelectorConfig(options=METHODS, mode=SelectSelectorMode.DROPDOWN)
),
vol.Optional(CONF_PAYLOAD): ObjectSelector(),
vol.Required(CONF_AUTH): data_entry_flow.section(
vol.Schema(
{
vol.Optional(CONF_AUTHENTICATION): SelectSelector(
SelectSelectorConfig(
options=[
HTTP_BASIC_AUTHENTICATION,
HTTP_DIGEST_AUTHENTICATION,
],
mode=SelectSelectorMode.DROPDOWN,
)
),
vol.Optional(CONF_USERNAME): TextSelector(
TextSelectorConfig(
type=TextSelectorType.TEXT, autocomplete="username"
)
),
vol.Optional(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD,
autocomplete="current-password",
)
),
}
),
data_entry_flow.SectionConfig(collapsed=True),
),
vol.Required(CONF_ADVANCED): data_entry_flow.section(
vol.Schema(
{
vol.Optional(CONF_HEADERS): ObjectSelector(),
vol.Optional(
CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL
): BooleanSelector(),
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): NumberSelector(
NumberSelectorConfig(min=0, step=1, mode=NumberSelectorMode.BOX)
),
vol.Optional(
CONF_ENCODING, default=DEFAULT_ENCODING
): TextSelector(),
}
),
data_entry_flow.SectionConfig(collapsed=True),
),
}
)
SENSOR_SETUP = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): TextSelector(),
vol.Required(CONF_SELECT): TextSelector(),
vol.Optional(CONF_INDEX, default=0): vol.All(
NumberSelector(
NumberSelectorConfig(min=0, step=1, mode=NumberSelectorMode.BOX)
),
vol.Coerce(int),
),
vol.Required(CONF_ADVANCED): data_entry_flow.section(
vol.Schema(
{
vol.Optional(CONF_ATTRIBUTE): TextSelector(),
vol.Optional(CONF_VALUE_TEMPLATE): TemplateSelector(),
vol.Optional(CONF_AVAILABILITY): TemplateSelector(),
vol.Optional(CONF_DEVICE_CLASS): SelectSelector(
SelectSelectorConfig(
options=[
cls.value
for cls in SensorDeviceClass
if cls != SensorDeviceClass.ENUM
],
mode=SelectSelectorMode.DROPDOWN,
translation_key="device_class",
sort=True,
)
),
vol.Optional(CONF_STATE_CLASS): SelectSelector(
SelectSelectorConfig(
options=[cls.value for cls in SensorStateClass],
mode=SelectSelectorMode.DROPDOWN,
translation_key="state_class",
sort=True,
)
),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): SelectSelector(
SelectSelectorConfig(
options=[cls.value for cls in UnitOfTemperature],
custom_value=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key="unit_of_measurement",
sort=True,
)
),
}
),
data_entry_flow.SectionConfig(collapsed=True),
),
}
)
SENSOR_SETUP = {
vol.Required(CONF_SELECT): TextSelector(),
vol.Optional(CONF_INDEX, default=0): NumberSelector(
NumberSelectorConfig(min=0, step=1, mode=NumberSelectorMode.BOX)
),
vol.Optional(CONF_ATTRIBUTE): TextSelector(),
vol.Optional(CONF_VALUE_TEMPLATE): TemplateSelector(),
vol.Optional(CONF_AVAILABILITY): TemplateSelector(),
vol.Optional(CONF_DEVICE_CLASS): SelectSelector(
SelectSelectorConfig(
options=[
cls.value for cls in SensorDeviceClass if cls != SensorDeviceClass.ENUM
],
mode=SelectSelectorMode.DROPDOWN,
translation_key="device_class",
sort=True,
)
),
vol.Optional(CONF_STATE_CLASS): SelectSelector(
SelectSelectorConfig(
options=[cls.value for cls in SensorStateClass],
mode=SelectSelectorMode.DROPDOWN,
translation_key="state_class",
sort=True,
)
),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): SelectSelector(
SelectSelectorConfig(
options=[cls.value for cls in UnitOfTemperature],
custom_value=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key="unit_of_measurement",
sort=True,
)
),
}
async def validate_rest_setup(
hass: HomeAssistant, user_input: dict[str, Any]
handler: SchemaCommonFlowHandler, user_input: dict[str, Any]
) -> dict[str, Any]:
"""Validate rest setup."""
config = deepcopy(user_input)
config.update(config.pop(CONF_ADVANCED, {}))
config.update(config.pop(CONF_AUTH, {}))
rest_config: dict[str, Any] = COMBINED_SCHEMA(config)
hass = async_get_hass()
rest_config: dict[str, Any] = COMBINED_SCHEMA(user_input)
try:
rest = create_rest_data_from_config(hass, rest_config)
await rest.async_update()
except Exception:
_LOGGER.exception("Error when getting resource %s", config[CONF_RESOURCE])
return {"base": "resource_error"}
except Exception as err:
raise SchemaFlowError("resource_error") from err
if rest.data is None:
return {"base": "no_data"}
raise SchemaFlowError("resource_error")
return user_input
async def validate_sensor_setup(
handler: SchemaCommonFlowHandler, user_input: dict[str, Any]
) -> dict[str, Any]:
"""Validate sensor input."""
user_input[CONF_INDEX] = int(user_input[CONF_INDEX])
user_input[CONF_UNIQUE_ID] = str(uuid.uuid1())
# Standard behavior is to merge the result with the options.
# In this case, we want to add a sub-item so we update the options directly.
sensors: list[dict[str, Any]] = handler.options.setdefault(SENSOR_DOMAIN, [])
sensors.append(user_input)
return {}
class ScrapeConfigFlow(ConfigFlow, domain=DOMAIN):
"""Scrape configuration flow."""
VERSION = 2
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> ScrapeOptionFlow:
"""Get the options flow for this handler."""
return ScrapeOptionFlow()
@classmethod
@callback
def async_get_supported_subentry_types(
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this handler."""
return {"entity": ScrapeSubentryFlowHandler}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""User flow to create the main config entry."""
errors: dict[str, str] = {}
if user_input is not None:
errors = await validate_rest_setup(self.hass, user_input)
title = user_input[CONF_RESOURCE]
if not errors:
return self.async_create_entry(data={}, options=user_input, title=title)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
RESOURCE_SETUP, user_input or {}
),
errors=errors,
)
async def validate_select_sensor(
handler: SchemaCommonFlowHandler, user_input: dict[str, Any]
) -> dict[str, Any]:
"""Store sensor index in flow state."""
handler.flow_state["_idx"] = int(user_input[CONF_INDEX])
return {}
class ScrapeOptionFlow(OptionsFlow):
"""Scrape Options flow."""
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Manage Scrape options."""
errors: dict[str, str] = {}
if user_input is not None:
errors = await validate_rest_setup(self.hass, user_input)
if not errors:
return self.async_create_entry(data=user_input)
return self.async_show_form(
step_id="init",
data_schema=self.add_suggested_values_to_schema(
RESOURCE_SETUP,
user_input or self.config_entry.options,
),
errors=errors,
)
async def get_select_sensor_schema(handler: SchemaCommonFlowHandler) -> vol.Schema:
"""Return schema for selecting a sensor."""
return vol.Schema(
{
vol.Required(CONF_INDEX): vol.In(
{
str(index): config[CONF_NAME]
for index, config in enumerate(handler.options[SENSOR_DOMAIN])
},
)
}
)
class ScrapeSubentryFlowHandler(ConfigSubentryFlow):
"""Handle subentry flow."""
async def get_edit_sensor_suggested_values(
handler: SchemaCommonFlowHandler,
) -> dict[str, Any]:
"""Return suggested values for sensor editing."""
idx: int = handler.flow_state["_idx"]
return dict(handler.options[SENSOR_DOMAIN][idx])
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""User flow to create a sensor subentry."""
if user_input is not None:
title = user_input.pop("name")
return self.async_create_entry(data=user_input, title=title)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
SENSOR_SETUP, user_input or {}
),
)
async def validate_sensor_edit(
handler: SchemaCommonFlowHandler, user_input: dict[str, Any]
) -> dict[str, Any]:
"""Update edited sensor."""
user_input[CONF_INDEX] = int(user_input[CONF_INDEX])
# Standard behavior is to merge the result with the options.
# In this case, we want to add a sub-item so we update the options directly,
# including popping omitted optional schema items.
idx: int = handler.flow_state["_idx"]
handler.options[SENSOR_DOMAIN][idx].update(user_input)
for key in DATA_SCHEMA_EDIT_SENSOR.schema:
if isinstance(key, vol.Optional) and key not in user_input:
# Key not present, delete keys old value (if present) too
handler.options[SENSOR_DOMAIN][idx].pop(key, None)
return {}
async def get_remove_sensor_schema(handler: SchemaCommonFlowHandler) -> vol.Schema:
"""Return schema for sensor removal."""
return vol.Schema(
{
vol.Required(CONF_INDEX): cv.multi_select(
{
str(index): config[CONF_NAME]
for index, config in enumerate(handler.options[SENSOR_DOMAIN])
},
)
}
)
async def validate_remove_sensor(
handler: SchemaCommonFlowHandler, user_input: dict[str, Any]
) -> dict[str, Any]:
"""Validate remove sensor."""
removed_indexes: set[str] = set(user_input[CONF_INDEX])
# Standard behavior is to merge the result with the options.
# In this case, we want to remove sub-items so we update the options directly.
entity_registry = er.async_get(handler.parent_handler.hass)
sensors: list[dict[str, Any]] = []
sensor: dict[str, Any]
for index, sensor in enumerate(handler.options[SENSOR_DOMAIN]):
if str(index) not in removed_indexes:
sensors.append(sensor)
elif entity_id := entity_registry.async_get_entity_id(
SENSOR_DOMAIN, DOMAIN, sensor[CONF_UNIQUE_ID]
):
entity_registry.async_remove(entity_id)
handler.options[SENSOR_DOMAIN] = sensors
return {}
DATA_SCHEMA_RESOURCE = vol.Schema(RESOURCE_SETUP)
DATA_SCHEMA_EDIT_SENSOR = vol.Schema(SENSOR_SETUP)
DATA_SCHEMA_SENSOR = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): TextSelector(),
**SENSOR_SETUP,
}
)
CONFIG_FLOW = {
"user": SchemaFlowFormStep(
schema=DATA_SCHEMA_RESOURCE,
next_step="sensor",
validate_user_input=validate_rest_setup,
),
"sensor": SchemaFlowFormStep(
schema=DATA_SCHEMA_SENSOR,
validate_user_input=validate_sensor_setup,
),
}
OPTIONS_FLOW = {
"init": SchemaFlowMenuStep(
["resource", "add_sensor", "select_edit_sensor", "remove_sensor"]
),
"resource": SchemaFlowFormStep(
DATA_SCHEMA_RESOURCE,
validate_user_input=validate_rest_setup,
),
"add_sensor": SchemaFlowFormStep(
DATA_SCHEMA_SENSOR,
suggested_values=None,
validate_user_input=validate_sensor_setup,
),
"select_edit_sensor": SchemaFlowFormStep(
get_select_sensor_schema,
suggested_values=None,
validate_user_input=validate_select_sensor,
next_step="edit_sensor",
),
"edit_sensor": SchemaFlowFormStep(
DATA_SCHEMA_EDIT_SENSOR,
suggested_values=get_edit_sensor_suggested_values,
validate_user_input=validate_sensor_edit,
),
"remove_sensor": SchemaFlowFormStep(
get_remove_sensor_schema,
suggested_values=None,
validate_user_input=validate_remove_sensor,
),
}
class ScrapeConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
"""Handle a config flow for Scrape."""
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""
return cast(str, options[CONF_RESOURCE])

View File

@@ -14,8 +14,6 @@ DEFAULT_SCAN_INTERVAL = timedelta(minutes=10)
PLATFORMS = [Platform.SENSOR]
CONF_ADVANCED = "advanced"
CONF_AUTH = "auth"
CONF_ENCODING = "encoding"
CONF_SELECT = "select"
CONF_INDEX = "index"

View File

@@ -1,21 +0,0 @@
{
"config": {
"step": {
"user": {
"sections": {
"advanced": "mdi:cog",
"auth": "mdi:lock"
}
}
}
},
"options": {
"step": {
"init": {
"sections": {
"advanced": "mdi:cog"
}
}
}
}
}

View File

@@ -46,10 +46,9 @@ TRIGGER_ENTITY_OPTIONS = (
CONF_AVAILABILITY,
CONF_DEVICE_CLASS,
CONF_ICON,
CONF_NAME,
CONF_PICTURE,
CONF_STATE_CLASS,
CONF_UNIQUE_ID,
CONF_STATE_CLASS,
CONF_UNIT_OF_MEASUREMENT,
)
@@ -71,7 +70,7 @@ async def async_setup_platform(
entities: list[ScrapeSensor] = []
for sensor_config in sensors_config:
trigger_entity_config = {}
trigger_entity_config = {CONF_NAME: sensor_config[CONF_NAME]}
for key in TRIGGER_ENTITY_OPTIONS:
if key not in sensor_config:
continue
@@ -99,24 +98,23 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Scrape sensor entry."""
coordinator = entry.runtime_data
for subentry in entry.subentries.values():
sensor = dict(subentry.data)
sensor.update(sensor.pop("advanced", {}))
sensor[CONF_UNIQUE_ID] = subentry.subentry_id
sensor[CONF_NAME] = subentry.title
entities: list = []
coordinator = entry.runtime_data
config = dict(entry.options)
for sensor in config["sensor"]:
sensor_config: ConfigType = vol.Schema(
TEMPLATE_SENSOR_BASE_SCHEMA.schema, extra=vol.ALLOW_EXTRA
)(sensor)
name: str = sensor_config[CONF_NAME]
value_string: str | None = sensor_config.get(CONF_VALUE_TEMPLATE)
value_template: ValueTemplate | None = (
ValueTemplate(value_string, hass) if value_string is not None else None
)
trigger_entity_config: dict[str, str | Template | None] = {}
trigger_entity_config: dict[str, str | Template | None] = {CONF_NAME: name}
for key in TRIGGER_ENTITY_OPTIONS:
if key not in sensor_config:
continue
@@ -125,22 +123,21 @@ async def async_setup_entry(
continue
trigger_entity_config[key] = sensor_config[key]
async_add_entities(
[
ScrapeSensor(
hass,
coordinator,
trigger_entity_config,
sensor_config[CONF_SELECT],
sensor_config.get(CONF_ATTRIBUTE),
sensor_config[CONF_INDEX],
value_template,
False,
)
],
config_subentry_id=subentry.subentry_id,
entities.append(
ScrapeSensor(
hass,
coordinator,
trigger_entity_config,
sensor_config[CONF_SELECT],
sensor_config.get(CONF_ATTRIBUTE),
sensor_config[CONF_INDEX],
value_template,
False,
)
)
async_add_entities(entities)
class ScrapeSensor(CoordinatorEntity[ScrapeCoordinator], ManualTriggerSensorEntity):
"""Representation of a web scrape sensor."""

View File

@@ -4,140 +4,134 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
},
"error": {
"no_data": "REST data is empty. Verify your configuration",
"resource_error": "Could not update REST data. Verify your configuration"
"resource_error": "Could not update rest data. Verify your configuration"
},
"step": {
"user": {
"sensor": {
"data": {
"method": "Method",
"payload": "Payload",
"resource": "Resource"
"attribute": "Attribute",
"availability": "Availability template",
"device_class": "Device class",
"index": "Index",
"name": "[%key:common::config_flow::data::name%]",
"select": "Select",
"state_class": "State class",
"unit_of_measurement": "Unit of measurement",
"value_template": "Value template"
},
"data_description": {
"payload": "Payload to use when method is POST.",
"resource": "The URL to the website that contains the value."
},
"sections": {
"advanced": {
"data": {
"encoding": "Character encoding",
"headers": "Headers",
"timeout": "Timeout",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"encoding": "Character encoding to use. Defaults to UTF-8.",
"headers": "Headers to use for the web request.",
"timeout": "Timeout for connection to website.",
"verify_ssl": "Enables/disables verification of SSL/TLS certificate, for example if it is self-signed."
},
"description": "Provide additional advanced settings for the resource.",
"name": "Advanced settings"
},
"auth": {
"data": {
"authentication": "Select authentication method",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"authentication": "Type of the HTTP authentication. Either basic or digest."
},
"description": "Provide authentication details to access the resource.",
"name": "Authentication settings"
}
"attribute": "Get value of an attribute on the selected tag.",
"availability": "Defines a template to get the availability of the sensor.",
"device_class": "The type/class of the sensor to set the icon in the frontend.",
"index": "Defines which of the elements returned by the CSS selector to use.",
"select": "Defines what tag to search for. Check Beautifulsoup CSS selectors for details.",
"state_class": "The state_class of the sensor.",
"unit_of_measurement": "Choose unit of measurement or create your own.",
"value_template": "Defines a template to get the state of the sensor."
}
}
}
},
"config_subentries": {
"entity": {
"entry_type": "Sensor",
"initiate_flow": {
"user": "Add sensor"
},
"step": {
"user": {
"data": {
"index": "Index",
"select": "Select"
},
"data_description": {
"index": "Defines which of the elements returned by the CSS selector to use.",
"select": "Defines what tag to search for. Check Beautifulsoup CSS selectors for details."
},
"sections": {
"advanced": {
"data": {
"attribute": "Attribute",
"availability": "Availability template",
"device_class": "Device class",
"state_class": "State class",
"unit_of_measurement": "Unit of measurement",
"value_template": "Value template"
},
"data_description": {
"attribute": "Get value of an attribute on the selected tag.",
"availability": "Defines a template to get the availability of the sensor.",
"device_class": "The type/class of the sensor to set the icon in the frontend.",
"state_class": "The state_class of the sensor.",
"unit_of_measurement": "Choose unit of measurement or create your own.",
"value_template": "Defines a template to get the state of the sensor."
},
"description": "Provide additional advanced settings for the sensor.",
"name": "Advanced settings"
}
}
"user": {
"data": {
"authentication": "Select authentication method",
"encoding": "Character encoding",
"headers": "Headers",
"method": "Method",
"password": "[%key:common::config_flow::data::password%]",
"payload": "Payload",
"resource": "Resource",
"timeout": "Timeout",
"username": "[%key:common::config_flow::data::username%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"authentication": "Type of the HTTP authentication. Either basic or digest.",
"encoding": "Character encoding to use. Defaults to UTF-8.",
"headers": "Headers to use for the web request.",
"payload": "Payload to use when method is POST.",
"resource": "The URL to the website that contains the value.",
"timeout": "Timeout for connection to website.",
"verify_ssl": "Enables/disables verification of SSL/TLS certificate, for example if it is self-signed."
}
}
}
},
"options": {
"error": {
"no_data": "[%key:component::scrape::config::error::no_data%]",
"resource_error": "[%key:component::scrape::config::error::resource_error%]"
},
"step": {
"init": {
"add_sensor": {
"data": {
"method": "[%key:component::scrape::config::step::user::data::method%]",
"payload": "[%key:component::scrape::config::step::user::data::payload%]",
"resource": "[%key:component::scrape::config::step::user::data::resource%]"
"attribute": "[%key:component::scrape::config::step::sensor::data::attribute%]",
"availability": "[%key:component::scrape::config::step::sensor::data::availability%]",
"device_class": "[%key:component::scrape::config::step::sensor::data::device_class%]",
"index": "[%key:component::scrape::config::step::sensor::data::index%]",
"name": "[%key:common::config_flow::data::name%]",
"select": "[%key:component::scrape::config::step::sensor::data::select%]",
"state_class": "[%key:component::scrape::config::step::sensor::data::state_class%]",
"unit_of_measurement": "[%key:component::scrape::config::step::sensor::data::unit_of_measurement%]",
"value_template": "[%key:component::scrape::config::step::sensor::data::value_template%]"
},
"data_description": {
"payload": "[%key:component::scrape::config::step::user::data_description::payload%]",
"resource": "[%key:component::scrape::config::step::user::data_description::resource%]"
"attribute": "[%key:component::scrape::config::step::sensor::data_description::attribute%]",
"availability": "[%key:component::scrape::config::step::sensor::data_description::availability%]",
"device_class": "[%key:component::scrape::config::step::sensor::data_description::device_class%]",
"index": "[%key:component::scrape::config::step::sensor::data_description::index%]",
"select": "[%key:component::scrape::config::step::sensor::data_description::select%]",
"state_class": "[%key:component::scrape::config::step::sensor::data_description::state_class%]",
"unit_of_measurement": "[%key:component::scrape::config::step::sensor::data_description::unit_of_measurement%]",
"value_template": "[%key:component::scrape::config::step::sensor::data_description::value_template%]"
}
},
"edit_sensor": {
"data": {
"attribute": "[%key:component::scrape::config::step::sensor::data::attribute%]",
"availability": "[%key:component::scrape::config::step::sensor::data::availability%]",
"device_class": "[%key:component::scrape::config::step::sensor::data::device_class%]",
"index": "[%key:component::scrape::config::step::sensor::data::index%]",
"name": "[%key:common::config_flow::data::name%]",
"select": "[%key:component::scrape::config::step::sensor::data::select%]",
"state_class": "[%key:component::scrape::config::step::sensor::data::state_class%]",
"unit_of_measurement": "[%key:component::scrape::config::step::sensor::data::unit_of_measurement%]",
"value_template": "[%key:component::scrape::config::step::sensor::data::value_template%]"
},
"sections": {
"advanced": {
"data": {
"encoding": "[%key:component::scrape::config::step::user::sections::advanced::data::encoding%]",
"headers": "[%key:component::scrape::config::step::user::sections::advanced::data::headers%]",
"timeout": "[%key:component::scrape::config::step::user::sections::advanced::data::timeout%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"encoding": "[%key:component::scrape::config::step::user::sections::advanced::data_description::encoding%]",
"headers": "[%key:component::scrape::config::step::user::sections::advanced::data_description::headers%]",
"timeout": "[%key:component::scrape::config::step::user::sections::advanced::data_description::timeout%]",
"verify_ssl": "[%key:component::scrape::config::step::user::sections::advanced::data_description::verify_ssl%]"
},
"description": "[%key:component::scrape::config::step::user::sections::advanced::description%]",
"name": "[%key:component::scrape::config::step::user::sections::advanced::name%]"
},
"auth": {
"data": {
"authentication": "[%key:component::scrape::config::step::user::sections::auth::data::authentication%]",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"authentication": "[%key:component::scrape::config::step::user::sections::auth::data_description::authentication%]"
},
"description": "[%key:component::scrape::config::step::user::sections::auth::description%]",
"name": "[%key:component::scrape::config::step::user::sections::auth::name%]"
},
"data_description": {
"attribute": "[%key:component::scrape::config::step::sensor::data_description::attribute%]",
"availability": "[%key:component::scrape::config::step::sensor::data_description::availability%]",
"device_class": "[%key:component::scrape::config::step::sensor::data_description::device_class%]",
"index": "[%key:component::scrape::config::step::sensor::data_description::index%]",
"select": "[%key:component::scrape::config::step::sensor::data_description::select%]",
"state_class": "[%key:component::scrape::config::step::sensor::data_description::state_class%]",
"unit_of_measurement": "[%key:component::scrape::config::step::sensor::data_description::unit_of_measurement%]",
"value_template": "[%key:component::scrape::config::step::sensor::data_description::value_template%]"
}
},
"init": {
"menu_options": {
"add_sensor": "Add sensor",
"remove_sensor": "Remove sensor",
"resource": "Configure resource",
"select_edit_sensor": "Configure sensor"
}
},
"resource": {
"data": {
"authentication": "[%key:component::scrape::config::step::user::data::authentication%]",
"encoding": "[%key:component::scrape::config::step::user::data::encoding%]",
"headers": "[%key:component::scrape::config::step::user::data::headers%]",
"method": "[%key:component::scrape::config::step::user::data::method%]",
"password": "[%key:common::config_flow::data::password%]",
"payload": "[%key:component::scrape::config::step::user::data::payload%]",
"resource": "[%key:component::scrape::config::step::user::data::resource%]",
"timeout": "[%key:component::scrape::config::step::user::data::timeout%]",
"username": "[%key:common::config_flow::data::username%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"authentication": "[%key:component::scrape::config::step::user::data_description::authentication%]",
"encoding": "[%key:component::scrape::config::step::user::data_description::encoding%]",
"headers": "[%key:component::scrape::config::step::user::data_description::headers%]",
"payload": "[%key:component::scrape::config::step::user::data_description::payload%]",
"resource": "[%key:component::scrape::config::step::user::data_description::resource%]",
"timeout": "[%key:component::scrape::config::step::user::data_description::timeout%]",
"verify_ssl": "[%key:component::scrape::config::step::user::data_description::verify_ssl%]"
}
}
}

View File

@@ -4,5 +4,5 @@
"codeowners": ["@fabaff"],
"documentation": "https://www.home-assistant.io/integrations/serial",
"iot_class": "local_polling",
"requirements": ["serialx==1.2.2"]
"requirements": ["pyserial-asyncio-fast==0.16"]
}

View File

@@ -3,11 +3,11 @@
from __future__ import annotations
import asyncio
from asyncio import Task
import json
import logging
from serialx import Parity, SerialException, StopBits, open_serial_connection
from serial import SerialException
import serial_asyncio_fast as serial_asyncio
import voluptuous as vol
from homeassistant.components.sensor import (
@@ -18,7 +18,6 @@ from homeassistant.const import CONF_NAME, CONF_VALUE_TEMPLATE, EVENT_HOMEASSIST
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.template import Template
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
_LOGGER = logging.getLogger(__name__)
@@ -34,9 +33,9 @@ CONF_DSRDTR = "dsrdtr"
DEFAULT_NAME = "Serial Sensor"
DEFAULT_BAUDRATE = 9600
DEFAULT_BYTESIZE = 8
DEFAULT_PARITY = Parity.NONE
DEFAULT_STOPBITS = StopBits.ONE
DEFAULT_BYTESIZE = serial_asyncio.serial.EIGHTBITS
DEFAULT_PARITY = serial_asyncio.serial.PARITY_NONE
DEFAULT_STOPBITS = serial_asyncio.serial.STOPBITS_ONE
DEFAULT_XONXOFF = False
DEFAULT_RTSCTS = False
DEFAULT_DSRDTR = False
@@ -47,21 +46,28 @@ PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
vol.Optional(CONF_BAUDRATE, default=DEFAULT_BAUDRATE): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_BYTESIZE, default=DEFAULT_BYTESIZE): vol.In([5, 6, 7, 8]),
vol.Optional(CONF_BYTESIZE, default=DEFAULT_BYTESIZE): vol.In(
[
serial_asyncio.serial.FIVEBITS,
serial_asyncio.serial.SIXBITS,
serial_asyncio.serial.SEVENBITS,
serial_asyncio.serial.EIGHTBITS,
]
),
vol.Optional(CONF_PARITY, default=DEFAULT_PARITY): vol.In(
[
Parity.NONE,
Parity.EVEN,
Parity.ODD,
Parity.MARK,
Parity.SPACE,
serial_asyncio.serial.PARITY_NONE,
serial_asyncio.serial.PARITY_EVEN,
serial_asyncio.serial.PARITY_ODD,
serial_asyncio.serial.PARITY_MARK,
serial_asyncio.serial.PARITY_SPACE,
]
),
vol.Optional(CONF_STOPBITS, default=DEFAULT_STOPBITS): vol.In(
[
StopBits.ONE,
StopBits.ONE_POINT_FIVE,
StopBits.TWO,
serial_asyncio.serial.STOPBITS_ONE,
serial_asyncio.serial.STOPBITS_ONE_POINT_FIVE,
serial_asyncio.serial.STOPBITS_TWO,
]
),
vol.Optional(CONF_XONXOFF, default=DEFAULT_XONXOFF): cv.boolean,
@@ -78,17 +84,28 @@ async def async_setup_platform(
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Serial sensor platform."""
name = config.get(CONF_NAME)
port = config.get(CONF_SERIAL_PORT)
baudrate = config.get(CONF_BAUDRATE)
bytesize = config.get(CONF_BYTESIZE)
parity = config.get(CONF_PARITY)
stopbits = config.get(CONF_STOPBITS)
xonxoff = config.get(CONF_XONXOFF)
rtscts = config.get(CONF_RTSCTS)
dsrdtr = config.get(CONF_DSRDTR)
value_template = config.get(CONF_VALUE_TEMPLATE)
sensor = SerialSensor(
name=config[CONF_NAME],
port=config[CONF_SERIAL_PORT],
baudrate=config[CONF_BAUDRATE],
bytesize=config[CONF_BYTESIZE],
parity=config[CONF_PARITY],
stopbits=config[CONF_STOPBITS],
xonxoff=config[CONF_XONXOFF],
rtscts=config[CONF_RTSCTS],
dsrdtr=config[CONF_DSRDTR],
value_template=config.get(CONF_VALUE_TEMPLATE),
name,
port,
baudrate,
bytesize,
parity,
stopbits,
xonxoff,
rtscts,
dsrdtr,
value_template,
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, sensor.stop_serial_read)
@@ -102,17 +119,17 @@ class SerialSensor(SensorEntity):
def __init__(
self,
name: str,
port: str,
baudrate: int,
bytesize: int,
parity: Parity,
stopbits: StopBits,
xonxoff: bool,
rtscts: bool,
dsrdtr: bool,
value_template: Template | None,
) -> None:
name,
port,
baudrate,
bytesize,
parity,
stopbits,
xonxoff,
rtscts,
dsrdtr,
value_template,
):
"""Initialize the Serial sensor."""
self._attr_name = name
self._port = port
@@ -123,12 +140,12 @@ class SerialSensor(SensorEntity):
self._xonxoff = xonxoff
self._rtscts = rtscts
self._dsrdtr = dsrdtr
self._serial_loop_task: Task[None] | None = None
self._serial_loop_task = None
self._template = value_template
async def async_added_to_hass(self) -> None:
"""Handle when an entity is about to be added to Home Assistant."""
self._serial_loop_task = self.hass.async_create_background_task(
self._serial_loop_task = self.hass.loop.create_task(
self.serial_read(
self._port,
self._baudrate,
@@ -138,31 +155,26 @@ class SerialSensor(SensorEntity):
self._xonxoff,
self._rtscts,
self._dsrdtr,
),
"Serial reader",
)
)
async def serial_read(
self,
device: str,
baudrate: int,
bytesize: int,
parity: Parity,
stopbits: StopBits,
xonxoff: bool,
rtscts: bool,
dsrdtr: bool,
device,
baudrate,
bytesize,
parity,
stopbits,
xonxoff,
rtscts,
dsrdtr,
**kwargs,
):
"""Read the data from the port."""
logged_error = False
while True:
reader = None
writer = None
try:
reader, writer = await open_serial_connection(
reader, _ = await serial_asyncio.open_serial_connection(
url=device,
baudrate=baudrate,
bytesize=bytesize,
@@ -173,7 +185,8 @@ class SerialSensor(SensorEntity):
dsrdtr=dsrdtr,
**kwargs,
)
except (OSError, SerialException, TimeoutError):
except SerialException:
if not logged_error:
_LOGGER.exception(
"Unable to connect to the serial device %s. Will retry", device
@@ -184,15 +197,15 @@ class SerialSensor(SensorEntity):
_LOGGER.debug("Serial device %s connected", device)
while True:
try:
line_bytes = await reader.readline()
except (OSError, SerialException):
line = await reader.readline()
except SerialException:
_LOGGER.exception(
"Error while reading serial device %s", device
)
await self._handle_error()
break
else:
line = line_bytes.decode("utf-8").strip()
line = line.decode("utf-8").strip()
try:
data = json.loads(line)
@@ -210,10 +223,6 @@ class SerialSensor(SensorEntity):
_LOGGER.debug("Received: %s", line)
self._attr_native_value = line
self.async_write_ha_state()
finally:
if writer is not None:
writer.close()
await writer.wait_closed()
async def _handle_error(self):
"""Handle error for serial connection."""

View File

@@ -807,7 +807,7 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
)
ssid_options = [network["ssid"] for network in sorted_networks]
# Preselect SSID if returning from failed provisioning attempt
# Pre-select SSID if returning from failed provisioning attempt
suggested_values: dict[str, Any] = {}
if self.selected_ssid:
suggested_values[CONF_SSID] = self.selected_ssid
@@ -1086,7 +1086,7 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle failed provisioning - allow retry."""
if user_input is not None:
# User wants to retry - keep selected_ssid so it's preselected
# User wants to retry - keep selected_ssid so it's pre-selected
self.wifi_networks = []
return await self.async_step_wifi_scan()

View File

@@ -6,5 +6,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/sql",
"iot_class": "local_polling",
"requirements": ["SQLAlchemy==2.0.49", "sqlparse==0.5.5"]
"requirements": ["SQLAlchemy==2.0.41", "sqlparse==0.5.5"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "system",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["PyTurboJPEG==1.8.3", "av==16.0.1", "numpy==2.3.2"]
"requirements": ["PyTurboJPEG==1.8.0", "av==16.0.1", "numpy==2.3.2"]
}

View File

@@ -25,6 +25,7 @@ from homeassistant.components.recorder.statistics import (
)
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import EnergyConverter
@@ -91,40 +92,11 @@ def _build_home_data(home: tibber.TibberHome) -> TibberHomeData:
return result
class TibberCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
"""Base Tibber coordinator."""
class TibberDataCoordinator(DataUpdateCoordinator[None]):
"""Handle Tibber data and insert statistics."""
config_entry: TibberConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: TibberConfigEntry,
*,
name: str,
update_interval: timedelta,
) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=name,
update_interval=update_interval,
)
self._runtime_data = config_entry.runtime_data
async def _async_get_client(self) -> tibber.Tibber:
"""Get the Tibber client with error handling."""
try:
return await self._runtime_data.async_get_client(self.hass)
except (ClientError, TimeoutError, tibber.exceptions.HttpExceptionError) as err:
raise UpdateFailed(f"Unable to create Tibber client: {err}") from err
class TibberDataCoordinator(TibberCoordinator[None]):
"""Handle Tibber data and insert statistics."""
def __init__(
self,
hass: HomeAssistant,
@@ -134,14 +106,17 @@ class TibberDataCoordinator(TibberCoordinator[None]):
"""Initialize the data handler."""
super().__init__(
hass,
config_entry,
_LOGGER,
config_entry=config_entry,
name=f"Tibber {tibber_connection.name}",
update_interval=timedelta(minutes=20),
)
async def _async_update_data(self) -> None:
"""Update data via API."""
tibber_connection = await self._async_get_client()
tibber_connection = await self.config_entry.runtime_data.async_get_client(
self.hass
)
try:
await tibber_connection.fetch_consumption_data_active_homes()
@@ -157,7 +132,9 @@ class TibberDataCoordinator(TibberCoordinator[None]):
async def _insert_statistics(self) -> None:
"""Insert Tibber statistics."""
tibber_connection = await self._async_get_client()
tibber_connection = await self.config_entry.runtime_data.async_get_client(
self.hass
)
for home in tibber_connection.get_homes():
sensors: list[tuple[str, bool, str | None, str]] = []
if home.hourly_consumption_data:
@@ -277,9 +254,11 @@ class TibberDataCoordinator(TibberCoordinator[None]):
async_add_external_statistics(self.hass, metadata, statistics)
class TibberPriceCoordinator(TibberCoordinator[dict[str, TibberHomeData]]):
class TibberPriceCoordinator(DataUpdateCoordinator[dict[str, TibberHomeData]]):
"""Handle Tibber price data and insert statistics."""
config_entry: TibberConfigEntry
def __init__(
self,
hass: HomeAssistant,
@@ -288,7 +267,8 @@ class TibberPriceCoordinator(TibberCoordinator[dict[str, TibberHomeData]]):
"""Initialize the price coordinator."""
super().__init__(
hass,
config_entry,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN} price",
update_interval=timedelta(minutes=1),
)
@@ -310,7 +290,9 @@ class TibberPriceCoordinator(TibberCoordinator[dict[str, TibberHomeData]]):
async def _async_update_data(self) -> dict[str, TibberHomeData]:
"""Update data via API and return per-home data for sensors."""
tibber_connection = await self._async_get_client()
tibber_connection = await self.config_entry.runtime_data.async_get_client(
self.hass
)
active_homes = tibber_connection.get_homes(only_active=True)
now = dt_util.now()
@@ -365,9 +347,11 @@ class TibberPriceCoordinator(TibberCoordinator[dict[str, TibberHomeData]]):
return result
class TibberDataAPICoordinator(TibberCoordinator[dict[str, TibberDevice]]):
class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
"""Fetch and cache Tibber Data API device capabilities."""
config_entry: TibberConfigEntry
def __init__(
self,
hass: HomeAssistant,
@@ -376,10 +360,12 @@ class TibberDataAPICoordinator(TibberCoordinator[dict[str, TibberDevice]]):
"""Initialize the coordinator."""
super().__init__(
hass,
entry,
_LOGGER,
name=f"{DOMAIN} Data API",
update_interval=timedelta(minutes=1),
config_entry=entry,
)
self._runtime_data = entry.runtime_data
self.sensors_by_device: dict[str, dict[str, tibber.data_api.Sensor]] = {}
def _build_sensor_lookup(self, devices: dict[str, TibberDevice]) -> None:
@@ -397,6 +383,15 @@ class TibberDataAPICoordinator(TibberCoordinator[dict[str, TibberDevice]]):
return device_sensors.get(sensor_id)
return None
async def _async_get_client(self) -> tibber.Tibber:
"""Get the Tibber client with error handling."""
try:
return await self._runtime_data.async_get_client(self.hass)
except ConfigEntryAuthFailed:
raise
except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
raise UpdateFailed(f"Unable to create Tibber client: {err}") from err
async def _async_setup(self) -> None:
"""Initial load of Tibber Data API devices."""
client = await self._async_get_client()

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["twentemilieu"],
"quality_scale": "silver",
"requirements": ["twentemilieu==3.0.0"]
"requirements": ["twentemilieu==2.2.1"]
}

View File

@@ -1,17 +0,0 @@
"""Provides conditions for updates."""
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition
from .const import DOMAIN
CONDITIONS: dict[str, type[Condition]] = {
"is_available": make_entity_state_condition(DOMAIN, STATE_ON),
"is_not_available": make_entity_state_condition(DOMAIN, STATE_OFF),
}
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
"""Return the update conditions."""
return CONDITIONS

View File

@@ -1,17 +0,0 @@
.condition_common: &condition_common
target:
entity:
domain: update
fields:
behavior:
required: true
default: any
selector:
select:
translation_key: condition_behavior
options:
- all
- any
is_available: *condition_common
is_not_available: *condition_common

View File

@@ -1,12 +1,4 @@
{
"conditions": {
"is_available": {
"condition": "mdi:package-up"
},
"is_not_available": {
"condition": "mdi:package"
}
},
"entity_component": {
"_": {
"default": "mdi:package-up",

View File

@@ -1,28 +1,7 @@
{
"common": {
"condition_behavior_name": "Condition passes if",
"trigger_behavior_name": "Trigger when"
},
"conditions": {
"is_available": {
"description": "Tests if one or more updates are available.",
"fields": {
"behavior": {
"name": "[%key:component::update::common::condition_behavior_name%]"
}
},
"name": "Update is available"
},
"is_not_available": {
"description": "Tests if one or more updates are not available.",
"fields": {
"behavior": {
"name": "[%key:component::update::common::condition_behavior_name%]"
}
},
"name": "Update is not available"
}
},
"device_automation": {
"extra_fields": {
"for": "[%key:common::device_automation::extra_fields::for%]"
@@ -80,12 +59,6 @@
}
},
"selector": {
"condition_behavior": {
"options": {
"all": "All",
"any": "Any"
}
},
"trigger_behavior": {
"options": {
"any": "Any",

View File

@@ -26,20 +26,24 @@ from homeassistant.core import (
from homeassistant.helpers import config_validation as cv, discovery_flow
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service_info.usb import UsbServiceInfo
from homeassistant.helpers.service_info.usb import UsbServiceInfo as _UsbServiceInfo
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import USBMatcher, async_get_usb
from homeassistant.util.hass_dict import HassKey
from .const import DOMAIN
from .models import SerialDevice, USBDevice
from .models import (
SerialDevice, # noqa: F401
USBDevice,
)
from .utils import (
async_scan_serial_ports,
scan_serial_ports,
usb_device_from_path,
scan_serial_ports, # noqa: F401
usb_device_from_path, # noqa: F401
usb_device_from_port, # noqa: F401
usb_device_matches_matcher,
usb_service_info_from_device,
usb_unique_id_from_service_info,
usb_unique_id_from_service_info, # noqa: F401
)
_LOGGER = logging.getLogger(__name__)
@@ -52,17 +56,9 @@ REQUEST_SCAN_COOLDOWN = 10 # 10 second cooldown
ADD_REMOVE_SCAN_COOLDOWN = 5 # 5 second cooldown to give devices a chance to register
__all__ = [
"SerialDevice",
"USBCallbackMatcher",
"USBDevice",
"async_register_port_event_callback",
"async_register_scan_request_callback",
"async_scan_serial_ports",
"scan_serial_ports",
"usb_device_from_path",
"usb_device_matches_matcher",
"usb_service_info_from_device",
"usb_unique_id_from_service_info",
]
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
@@ -362,7 +358,7 @@ class USBDiscovery:
for matcher in matched:
for flow in self.hass.config_entries.flow.async_progress_by_init_data_type(
UsbServiceInfo,
_UsbServiceInfo,
lambda flow_service_info: flow_service_info == service_info,
):
if matcher["domain"] != flow["handler"]:

View File

@@ -7,5 +7,5 @@
"integration_type": "system",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["aiousbwatcher==1.1.1", "serialx==1.2.2"]
"requirements": ["aiousbwatcher==1.1.1", "pyserial==3.5"]
}

View File

@@ -3,10 +3,12 @@
from __future__ import annotations
from collections.abc import Sequence
import dataclasses
import fnmatch
import os
from serialx import SerialPortInfo, list_serial_ports
from serial.tools.list_ports import comports
from serial.tools.list_ports_common import ListPortInfo
from homeassistant.core import HomeAssistant
from homeassistant.helpers.service_info.usb import UsbServiceInfo
@@ -15,8 +17,8 @@ from homeassistant.loader import USBMatcher
from .models import SerialDevice, USBDevice
def usb_device_from_port(port: SerialPortInfo) -> USBDevice:
"""Convert serialx SerialPortInfo to USBDevice."""
def usb_device_from_port(port: ListPortInfo) -> USBDevice:
"""Convert serial ListPortInfo to USBDevice."""
assert port.vid is not None
assert port.pid is not None
@@ -26,30 +28,53 @@ def usb_device_from_port(port: SerialPortInfo) -> USBDevice:
pid=f"{hex(port.pid)[2:]:0>4}".upper(),
serial_number=port.serial_number,
manufacturer=port.manufacturer,
description=port.product,
description=port.description,
)
def serial_device_from_port(port: SerialPortInfo) -> SerialDevice:
"""Convert serialx SerialPortInfo to SerialDevice."""
def serial_device_from_port(port: ListPortInfo) -> SerialDevice:
"""Convert serial ListPortInfo to SerialDevice."""
return SerialDevice(
device=port.device,
serial_number=port.serial_number,
manufacturer=port.manufacturer,
description=port.product,
description=port.description,
)
def usb_serial_device_from_port(port: SerialPortInfo) -> USBDevice | SerialDevice:
"""Convert serialx SerialPortInfo to USBDevice or SerialDevice."""
if port.vid is not None and port.pid is not None:
def usb_serial_device_from_port(port: ListPortInfo) -> USBDevice | SerialDevice:
"""Convert serial ListPortInfo to USBDevice or SerialDevice."""
if port.vid is not None or port.pid is not None:
assert port.vid is not None
assert port.pid is not None
return usb_device_from_port(port)
return serial_device_from_port(port)
def scan_serial_ports() -> Sequence[USBDevice | SerialDevice]:
"""Scan serial ports and return USB and other serial devices."""
return [usb_serial_device_from_port(port) for port in list_serial_ports()]
# Scan all symlinks first
by_id = "/dev/serial/by-id"
realpath_to_by_id: dict[str, str] = {}
if os.path.isdir(by_id):
for path in (entry.path for entry in os.scandir(by_id) if entry.is_symlink()):
realpath_to_by_id[os.path.realpath(path)] = path
serial_ports = []
for port in comports():
device = usb_serial_device_from_port(port)
device_path = realpath_to_by_id.get(port.device, port.device)
if device_path != port.device:
# Prefer the unique /dev/serial/by-id/ path if it exists
device = dataclasses.replace(device, device=device_path)
serial_ports.append(device)
return serial_ports
async def async_scan_serial_ports(

View File

@@ -315,7 +315,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
return await self.async_step_verify_radio()
# Preselect the currently configured port
# Pre-select the currently configured port
default_port: vol.Undefined | str = vol.UNDEFINED
if self._radio_mgr.device_path is not None:
@@ -345,7 +345,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
)
return await self.async_step_manual_port_config()
# Preselect the current radio type
# Pre-select the current radio type
default: vol.Undefined | str = vol.UNDEFINED
if self._radio_mgr.radio_type is not None:

View File

@@ -3,7 +3,6 @@
from typing import Any
import voluptuous as vol
from zwave_js_server.const import CommandClass
from homeassistant.helpers import config_validation as cv
@@ -19,10 +18,6 @@ BITMASK_SCHEMA = vol.All(
lambda value: int(value, 16),
)
COMMAND_CLASS_SCHEMA = vol.All(
vol.Coerce(int), vol.In([cc.value for cc in CommandClass])
)
def boolean(value: Any) -> bool:
"""Validate and coerce a boolean value."""

View File

@@ -30,7 +30,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from .config_validation import COMMAND_CLASS_SCHEMA, VALUE_SCHEMA
from .config_validation import VALUE_SCHEMA
from .const import (
ATTR_COMMAND_CLASS,
ATTR_CONFIG_PARAMETER,
@@ -122,7 +122,7 @@ SET_LOCK_USERCODE_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
SET_VALUE_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): SERVICE_SET_VALUE,
vol.Required(ATTR_COMMAND_CLASS): COMMAND_CLASS_SCHEMA,
vol.Required(ATTR_COMMAND_CLASS): vol.In([cc.value for cc in CommandClass]),
vol.Required(ATTR_PROPERTY): vol.Any(int, str),
vol.Optional(ATTR_PROPERTY_KEY): vol.Any(vol.Coerce(int), cv.string),
vol.Optional(ATTR_ENDPOINT): vol.Coerce(int),
@@ -334,7 +334,7 @@ async def async_get_action_capabilities(
{
vol.Required(ATTR_COMMAND_CLASS): vol.In(
{
str(CommandClass(cc.id).value): cc.name
CommandClass(cc.id).value: cc.name
for cc in sorted(
node.command_classes, key=lambda cc: cc.name
)

View File

@@ -15,7 +15,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from .config_validation import COMMAND_CLASS_SCHEMA, VALUE_SCHEMA
from .config_validation import VALUE_SCHEMA
from .const import (
ATTR_COMMAND_CLASS,
ATTR_ENDPOINT,
@@ -65,7 +65,7 @@ CONFIG_PARAMETER_CONDITION_SCHEMA = cv.DEVICE_CONDITION_BASE_SCHEMA.extend(
VALUE_CONDITION_SCHEMA = cv.DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): VALUE_TYPE,
vol.Required(ATTR_COMMAND_CLASS): COMMAND_CLASS_SCHEMA,
vol.Required(ATTR_COMMAND_CLASS): vol.In([cc.value for cc in CommandClass]),
vol.Required(ATTR_PROPERTY): vol.Any(vol.Coerce(int), cv.string),
vol.Optional(ATTR_PROPERTY_KEY): vol.Any(vol.Coerce(int), cv.string),
vol.Optional(ATTR_ENDPOINT): vol.Coerce(int),
@@ -221,7 +221,7 @@ async def async_get_condition_capabilities(
{
vol.Required(ATTR_COMMAND_CLASS): vol.In(
{
str(CommandClass(cc.id).value): cc.name
CommandClass(cc.id).value: cc.name
for cc in sorted(
node.command_classes, key=lambda cc: cc.name
)

View File

@@ -31,7 +31,7 @@ from homeassistant.helpers import (
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
from homeassistant.helpers.typing import ConfigType
from .config_validation import COMMAND_CLASS_SCHEMA, VALUE_SCHEMA
from .config_validation import VALUE_SCHEMA
from .const import (
ATTR_COMMAND_CLASS,
ATTR_DATA_TYPE,
@@ -91,7 +91,7 @@ NOTIFICATION_EVENT_CC_MAPPINGS = (
# Event based trigger schemas
BASE_EVENT_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(ATTR_COMMAND_CLASS): COMMAND_CLASS_SCHEMA,
vol.Required(ATTR_COMMAND_CLASS): vol.In([cc.value for cc in CommandClass]),
}
)
@@ -162,7 +162,7 @@ NODE_STATUS_SCHEMA = BASE_STATE_SCHEMA.extend(
# zwave_js.value_updated based trigger schemas
BASE_VALUE_UPDATED_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(ATTR_COMMAND_CLASS): COMMAND_CLASS_SCHEMA,
vol.Required(ATTR_COMMAND_CLASS): vol.In([cc.value for cc in CommandClass]),
vol.Required(ATTR_PROPERTY): vol.Any(int, str),
vol.Optional(ATTR_PROPERTY_KEY): vol.Any(None, vol.Coerce(int), str),
vol.Optional(ATTR_ENDPOINT, default=0): vol.Any(None, vol.Coerce(int)),
@@ -558,7 +558,7 @@ async def async_get_trigger_capabilities(
{
vol.Required(ATTR_COMMAND_CLASS): vol.In(
{
str(CommandClass(cc.id).value): cc.name
CommandClass(cc.id).value: cc.name
for cc in sorted(
node.command_classes, key=lambda cc: cc.name
)

View File

@@ -572,12 +572,12 @@ def get_value_state_schema(
return vol.Coerce(bool)
if value.configuration_value_type == ConfigurationValueType.ENUMERATED:
return vol.In({str(int(k)): v for k, v in value.metadata.states.items()})
return vol.In({int(k): v for k, v in value.metadata.states.items()})
return None
if value.metadata.states:
return vol.In({str(int(k)): v for k, v in value.metadata.states.items()})
return vol.In({int(k): v for k, v in value.metadata.states.items()})
return vol.All(
vol.Coerce(int),

View File

@@ -51,8 +51,8 @@ ATTR_TO = "to"
_OPTIONS_SCHEMA_DICT = {
vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_COMMAND_CLASS): vol.All(
vol.Coerce(int), vol.In({cc.value: cc.name for cc in CommandClass})
vol.Required(ATTR_COMMAND_CLASS): vol.In(
{cc.value: cc.name for cc in CommandClass}
),
vol.Required(ATTR_PROPERTY): vol.Any(vol.Coerce(int), cv.string),
vol.Optional(ATTR_ENDPOINT): vol.Coerce(int),

View File

@@ -316,11 +316,11 @@ class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False):
class FlowType(StrEnum):
"""Flow type supported in `next_flow` of ConfigFlowResult."""
"""Flow type."""
CONFIG_FLOW = "config_flow"
OPTIONS_FLOW = "options_flow"
CONFIG_SUBENTRIES_FLOW = "config_subentries_flow"
# Add other flow types here as needed in the future,
# if we want to support them in the `next_flow` parameter.
def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> None:
@@ -1608,26 +1608,6 @@ class ConfigEntriesFlowManager(
issue_id = f"config_entry_reauth_{flow.handler}_{entry_id}"
ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id)
def _async_validate_next_flow(
self,
result: ConfigFlowResult,
) -> None:
"""Validate `next_flow` in result if provided."""
if (next_flow := result.get("next_flow")) is None:
return
flow_type, flow_id = next_flow
if flow_type not in FlowType:
raise HomeAssistantError(f"Invalid flow type: {flow_type}")
if flow_type == FlowType.CONFIG_FLOW:
# Raises UnknownFlow if the flow does not exist.
self.hass.config_entries.flow.async_get(flow_id)
if flow_type == FlowType.OPTIONS_FLOW:
# Raises UnknownFlow if the flow does not exist.
self.hass.config_entries.options.async_get(flow_id)
if flow_type == FlowType.CONFIG_SUBENTRIES_FLOW:
# Raises UnknownFlow if the flow does not exist.
self.hass.config_entries.subentries.async_get(flow_id)
async def async_finish_flow(
self,
flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult],
@@ -1676,8 +1656,6 @@ class ConfigEntriesFlowManager(
self.config_entries.async_update_entry(
entry, discovery_keys=new_discovery_keys
)
self._async_validate_next_flow(result)
return result
# Mark the step as done.
@@ -1792,10 +1770,6 @@ class ConfigEntriesFlowManager(
self.config_entries._async_clean_up(existing_entry) # noqa: SLF001
result["result"] = entry
if not existing_entry:
result = await flow.async_on_create_entry(result)
self._async_validate_next_flow(result)
return result
async def async_create_flow(
@@ -3317,10 +3291,7 @@ class ConfigFlow(ConfigEntryBaseFlow):
return
flow_type, flow_id = next_flow
if flow_type != FlowType.CONFIG_FLOW:
raise HomeAssistantError(
"next_flow only supports FlowType.CONFIG_FLOW; "
"use async_on_create_entry for options or subentry flows"
)
raise HomeAssistantError("Invalid next_flow type")
# Raises UnknownFlow if the flow does not exist.
self.hass.config_entries.flow.async_get(flow_id)
result["next_flow"] = next_flow
@@ -3341,15 +3312,6 @@ class ConfigFlow(ConfigEntryBaseFlow):
self._async_set_next_flow_if_valid(result, next_flow)
return result
async def async_on_create_entry(self, result: ConfigFlowResult) -> ConfigFlowResult:
"""Runs after a config flow has created a config entry.
Can be overridden by integrations to add additional data to the result.
Example: creating next flow entries to the result which needs a
config entry created before it can start.
"""
return result
@callback
def async_create_entry( # type: ignore[override]
self,

View File

@@ -544,9 +544,8 @@ class HomeAssistant:
) -> None:
"""Add a job to be executed by the event loop or by an executor.
If the job is a coroutine, coroutine function, or decorated with
@callback, it will be run by the event loop, if not it will be run
by an executor.
If the job is either a coroutine or decorated with @callback, it will be
run by the event loop, if not it will be run by an executor.
target: target to call.
args: parameters for method to call.
@@ -558,14 +557,6 @@ class HomeAssistant:
functools.partial(self.async_create_task, target, eager_start=True)
)
return
# For @callback targets, schedule directly via call_soon_threadsafe
# to avoid the extra deferral through _async_add_hass_job + call_soon.
# Check iscoroutinefunction to gracefully handle incorrectly labeled @callback functions.
if is_callback_check_partial(target) and not inspect.iscoroutinefunction(
target
):
self.loop.call_soon_threadsafe(target, *args)
return
self.loop.call_soon_threadsafe(
functools.partial(self._async_add_hass_job, HassJob(target), *args)
)
@@ -607,9 +598,8 @@ class HomeAssistant:
) -> asyncio.Future[_R] | None:
"""Add a job to be executed by the event loop or by an executor.
If the job is a coroutine, coroutine function, or decorated with
@callback, it will be run by the event loop, if not it will be run
by an executor.
If the job is either a coroutine or decorated with @callback, it will be
run by the event loop, if not it will be run by an executor.
This method must be run in the event loop.

View File

@@ -31,6 +31,7 @@ class EntityPlatforms(StrEnum):
IMAGE_PROCESSING = "image_processing"
INFRARED = "infrared"
LAWN_MOWER = "lawn_mower"
RADIO_FREQUENCY = "radio_frequency"
LIGHT = "light"
LOCK = "lock"
MEDIA_PLAYER = "media_player"

View File

@@ -7,7 +7,6 @@ import asyncio
from collections import defaultdict
from collections.abc import Callable, Coroutine, Iterable, Mapping
from dataclasses import dataclass, field
from datetime import timedelta
import functools
import inspect
import logging
@@ -32,7 +31,6 @@ from homeassistant.const import (
CONF_ENABLED,
CONF_ENTITY_ID,
CONF_EVENT_DATA,
CONF_FOR,
CONF_ID,
CONF_OPTIONS,
CONF_PLATFORM,
@@ -76,7 +74,6 @@ from .automation import (
get_relative_description_key,
move_options_fields_to_top_level,
)
from .event import async_track_same_state
from .integration_platform import async_process_integration_platforms
from .selector import (
NumericThresholdMode,
@@ -343,7 +340,6 @@ ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST = ENTITY_STATE_TRIGGER_SCHEMA.extend(
vol.Required(ATTR_BEHAVIOR, default=BEHAVIOR_ANY): vol.In(
[BEHAVIOR_FIRST, BEHAVIOR_LAST, BEHAVIOR_ANY]
),
vol.Optional(CONF_FOR): cv.positive_time_period_dict,
},
}
)
@@ -353,9 +349,6 @@ class EntityTriggerBase(Trigger):
"""Trigger for entity state changes."""
_domain_specs: Mapping[str, DomainSpec]
_excluded_states: Final[frozenset[str]] = frozenset(
{STATE_UNAVAILABLE, STATE_UNKNOWN}
)
_schema: vol.Schema = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST
@override
@@ -372,7 +365,6 @@ class EntityTriggerBase(Trigger):
if TYPE_CHECKING:
assert config.target is not None
self._options = config.options or {}
self._duration: timedelta | None = self._options.get(CONF_FOR)
self._target = config.target
def entity_filter(self, entities: set[str]) -> set[str]:
@@ -400,16 +392,17 @@ class EntityTriggerBase(Trigger):
self.is_valid_state(state)
for entity_id in entity_ids
if (state := self._hass.states.get(entity_id)) is not None
and state.state not in self._excluded_states
)
def count_matches(self, entity_ids: set[str]) -> int:
"""Count the number of entity states that match."""
return sum(
self.is_valid_state(state)
for entity_id in entity_ids
if (state := self._hass.states.get(entity_id)) is not None
and state.state not in self._excluded_states
def check_one_match(self, entity_ids: set[str]) -> bool:
"""Check that only one entity state matches."""
return (
sum(
self.is_valid_state(state)
for entity_id in entity_ids
if (state := self._hass.states.get(entity_id)) is not None
)
== 1
)
@override
@@ -418,8 +411,7 @@ class EntityTriggerBase(Trigger):
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
behavior: str = self._options.get(ATTR_BEHAVIOR, BEHAVIOR_ANY)
unsub_track_same: dict[str, Callable[[], None]] = {}
behavior = self._options.get(ATTR_BEHAVIOR)
@callback
def state_change_listener(
@@ -431,30 +423,6 @@ class EntityTriggerBase(Trigger):
from_state = event.data["old_state"]
to_state = event.data["new_state"]
def state_still_valid(
_: str, from_state: State | None, to_state: State | None
) -> bool:
"""Check if the state is still valid during the duration wait.
Called by async_track_same_state on each state change to
determine whether to cancel the timer.
For behavior any, checks the individual entity's state.
For behavior first/last, checks the combined state.
"""
if behavior == BEHAVIOR_LAST:
return self.check_all_match(
target_state_change_data.targeted_entity_ids
)
if behavior == BEHAVIOR_FIRST:
return (
self.count_matches(target_state_change_data.targeted_entity_ids)
>= 1
)
# Behavior any: check the individual entity's state
if not to_state:
return False
return self.is_valid_state(to_state)
if not from_state or not to_state:
return
@@ -472,65 +440,25 @@ class EntityTriggerBase(Trigger):
):
return
elif behavior == BEHAVIOR_FIRST:
# Note: It's enough to test for exactly 1 match here because if there
# were previously 2 matches the transition would not be valid and we
# would have returned already.
if (
self.count_matches(target_state_change_data.targeted_entity_ids)
!= 1
if not self.check_one_match(
target_state_change_data.targeted_entity_ids
):
return
@callback
def call_action() -> None:
"""Call action with right context."""
# After a `for` delay, keep the original triggering event payload.
# `async_track_same_state` only verifies the state remained valid
# for the configured duration before firing the action.
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
"for": self._duration,
},
f"state of {entity_id}",
event.context,
)
if not self._duration:
call_action()
return
subscription_key = entity_id if behavior == BEHAVIOR_ANY else behavior
if subscription_key in unsub_track_same:
unsub_track_same.pop(subscription_key)()
unsub_track_same[subscription_key] = async_track_same_state(
self._hass,
self._duration,
call_action,
state_still_valid,
entity_ids=(
entity_id
if behavior == BEHAVIOR_ANY
else target_state_change_data.targeted_entity_ids
),
run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"state of {entity_id}",
event.context,
)
unsub = async_track_target_selector_state_change_event(
return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, self.entity_filter
)
@callback
def async_remove() -> None:
"""Remove state listeners async."""
unsub()
for async_remove in unsub_track_same.values():
async_remove()
unsub_track_same.clear()
return async_remove
class EntityTargetStateTriggerBase(EntityTriggerBase):
"""Trigger for entity state changes to a specific state.

View File

@@ -15,7 +15,7 @@ astral==2.2
async-interrupt==1.2.2
async-upnp-client==0.46.2
atomicwrites-homeassistant==1.4.1
attrs==26.1.0
attrs==25.4.0
audioop-lts==0.2.1
av==16.0.1
awesomeversion==25.8.0
@@ -32,7 +32,7 @@ cronsim==2.7
cryptography==46.0.7
dbus-fast==4.0.4
file-read-backwards==2.0.0
fnv-hash-fast==2.0.2
fnv-hash-fast==2.0.0
go2rtc-client==0.4.0
ha-ffmpeg==3.2.2
habluetooth==6.0.0
@@ -47,7 +47,7 @@ Jinja2==3.1.6
lru-dict==1.3.0
mutagen==1.47.0
openai==2.21.0
orjson==3.11.8
orjson==3.11.7
packaging>=23.1
paho-mqtt==2.1.0
Pillow==12.2.0
@@ -57,21 +57,21 @@ PyJWT==2.10.1
pymicro-vad==1.0.1
PyNaCl==1.6.2
pyOpenSSL==26.0.0
pyserial==3.5
pyspeex-noise==1.0.2
python-slugify==8.0.4
PyTurboJPEG==1.8.3
PyTurboJPEG==1.8.0
PyYAML==6.0.3
requests==2.33.1
securetar==2026.4.1
serialx==1.2.2
SQLAlchemy==2.0.49
SQLAlchemy==2.0.41
standard-aifc==3.13.0
standard-telnetlib==3.13.0
typing-extensions>=4.15.0,<5.0
ulid-transform==2.2.0
urllib3>=2.0
uv==0.11.1
voluptuous-openapi==0.3.0
voluptuous-openapi==0.2.0
voluptuous-serialize==2.7.0
voluptuous==0.15.2
webrtc-models==0.3.0

View File

@@ -36,7 +36,7 @@ dependencies = [
"annotatedyaml==1.0.2",
"astral==2.2",
"async-interrupt==1.2.2",
"attrs==26.1.0",
"attrs==25.4.0",
"atomicwrites-homeassistant==1.4.1",
"audioop-lts==0.2.1",
"awesomeversion==25.8.0",
@@ -44,7 +44,7 @@ dependencies = [
"certifi>=2021.5.30",
"ciso8601==2.3.3",
"cronsim==2.7",
"fnv-hash-fast==2.0.2",
"fnv-hash-fast==2.0.0",
# hass-nabucasa is imported by helpers which don't depend on the cloud
# integration
"hass-nabucasa==2.2.0",
@@ -61,14 +61,14 @@ dependencies = [
"Pillow==12.2.0",
"propcache==0.4.1",
"pyOpenSSL==26.0.0",
"orjson==3.11.8",
"orjson==3.11.7",
"packaging>=23.1",
"psutil-home-assistant==0.0.1",
"python-slugify==8.0.4",
"PyYAML==6.0.3",
"requests==2.33.1",
"securetar==2026.4.1",
"SQLAlchemy==2.0.49",
"SQLAlchemy==2.0.41",
"standard-aifc==3.13.0",
"standard-telnetlib==3.13.0",
"typing-extensions>=4.15.0,<5.0",
@@ -77,7 +77,7 @@ dependencies = [
"uv==0.11.1",
"voluptuous==0.15.2",
"voluptuous-serialize==2.7.0",
"voluptuous-openapi==0.3.0",
"voluptuous-openapi==0.2.0",
"yarl==1.23.0",
"webrtc-models==0.3.0",
"zeroconf==0.148.0",

12
requirements.txt generated
View File

@@ -14,7 +14,7 @@ annotatedyaml==1.0.2
astral==2.2
async-interrupt==1.2.2
atomicwrites-homeassistant==1.4.1
attrs==26.1.0
attrs==25.4.0
audioop-lts==0.2.1
awesomeversion==25.8.0
bcrypt==5.0.0
@@ -22,7 +22,7 @@ certifi>=2021.5.30
ciso8601==2.3.3
cronsim==2.7
cryptography==46.0.7
fnv-hash-fast==2.0.2
fnv-hash-fast==2.0.0
ha-ffmpeg==3.2.2
hass-nabucasa==2.2.0
hassil==3.5.0
@@ -34,7 +34,7 @@ infrared-protocols==1.1.0
Jinja2==3.1.6
lru-dict==1.3.0
mutagen==1.47.0
orjson==3.11.8
orjson==3.11.7
packaging>=23.1
Pillow==12.2.0
propcache==0.4.1
@@ -44,18 +44,18 @@ pymicro-vad==1.0.1
pyOpenSSL==26.0.0
pyspeex-noise==1.0.2
python-slugify==8.0.4
PyTurboJPEG==1.8.3
PyTurboJPEG==1.8.0
PyYAML==6.0.3
requests==2.33.1
securetar==2026.4.1
SQLAlchemy==2.0.49
SQLAlchemy==2.0.41
standard-aifc==3.13.0
standard-telnetlib==3.13.0
typing-extensions>=4.15.0,<5.0
ulid-transform==2.2.0
urllib3>=2.0
uv==0.11.1
voluptuous-openapi==0.3.0
voluptuous-openapi==0.2.0
voluptuous-serialize==2.7.0
voluptuous==0.15.2
webrtc-models==0.3.0

21
requirements_all.txt generated
View File

@@ -96,7 +96,7 @@ PyTransportNSW==0.1.1
# homeassistant.components.camera
# homeassistant.components.stream
PyTurboJPEG==1.8.3
PyTurboJPEG==1.8.0
# homeassistant.components.vicare
PyViCare==2.59.0
@@ -115,7 +115,7 @@ RtmAPI==0.7.2
# homeassistant.components.recorder
# homeassistant.components.sql
SQLAlchemy==2.0.49
SQLAlchemy==2.0.41
# homeassistant.components.tami4
Tami4EdgeAPI==3.0
@@ -1006,7 +1006,7 @@ flux-led==1.2.0
# homeassistant.components.homekit
# homeassistant.components.recorder
fnv-hash-fast==2.0.2
fnv-hash-fast==2.0.0
# homeassistant.components.foobot
foobot_async==1.0.0
@@ -2465,6 +2465,13 @@ pysensibo==1.2.1
# homeassistant.components.senz
pysenz==1.0.2
# homeassistant.components.serial
pyserial-asyncio-fast==0.16
# homeassistant.components.acer_projector
# homeassistant.components.usb
pyserial==3.5
# homeassistant.components.sesame
pysesame2==1.0.1
@@ -2824,6 +2831,9 @@ renson-endura-delta==1.7.2
# homeassistant.components.reolink
reolink-aio==0.19.1
# homeassistant.components.radio_frequency
rf-protocols==0.0.1
# homeassistant.components.idteck_prox
rfk101py==0.0.1
@@ -2921,10 +2931,7 @@ sentence-stream==1.2.0
# homeassistant.components.sentry
sentry-sdk==2.48.0
# homeassistant.components.acer_projector
# homeassistant.components.homeassistant_hardware
# homeassistant.components.serial
# homeassistant.components.usb
# homeassistant.components.zha
serialx==1.2.2
@@ -3159,7 +3166,7 @@ tuya-device-handlers==0.0.17
tuya-device-sharing-sdk==0.2.8
# homeassistant.components.twentemilieu
twentemilieu==3.0.0
twentemilieu==2.2.1
# homeassistant.components.twilio
twilio==6.32.0

View File

@@ -8,7 +8,7 @@
-c homeassistant/package_constraints.txt
-r requirements_test_pre_commit.txt
astroid==4.0.4
coverage==7.13.5
coverage==7.10.6
freezegun==1.5.5
# librt is an internal mypy dependency
librt==0.8.1
@@ -22,18 +22,18 @@ pylint-per-file-ignores==3.2.1
pipdeptree==2.26.1
pytest-asyncio==1.3.0
pytest-aiohttp==1.1.0
pytest-cov==7.1.0
pytest-cov==7.0.0
pytest-freezer==0.4.9
pytest-github-actions-annotate-failures==0.4.0
pytest-github-actions-annotate-failures==0.3.0
pytest-socket==0.7.0
pytest-sugar==1.1.1
pytest-sugar==1.0.0
pytest-timeout==2.4.0
pytest-unordered==0.7.0
pytest-picked==0.5.1
pytest-xdist==3.8.0
pytest==9.0.3
requests-mock==1.12.1
respx==0.23.1
respx==0.22.0
syrupy==5.1.0
tqdm==4.67.1
types-aiofiles==24.1.0.20250822

View File

@@ -93,7 +93,7 @@ PyTransportNSW==0.1.1
# homeassistant.components.camera
# homeassistant.components.stream
PyTurboJPEG==1.8.3
PyTurboJPEG==1.8.0
# homeassistant.components.vicare
PyViCare==2.59.0
@@ -112,7 +112,7 @@ RtmAPI==0.7.2
# homeassistant.components.recorder
# homeassistant.components.sql
SQLAlchemy==2.0.49
SQLAlchemy==2.0.41
# homeassistant.components.tami4
Tami4EdgeAPI==3.0
@@ -894,7 +894,7 @@ flux-led==1.2.0
# homeassistant.components.homekit
# homeassistant.components.recorder
fnv-hash-fast==2.0.2
fnv-hash-fast==2.0.0
# homeassistant.components.foobot
foobot_async==1.0.0
@@ -2109,6 +2109,10 @@ pysensibo==1.2.1
# homeassistant.components.senz
pysenz==1.0.2
# homeassistant.components.acer_projector
# homeassistant.components.usb
pyserial==3.5
# homeassistant.components.seventeentrack
pyseventeentrack==1.1.3
@@ -2405,6 +2409,9 @@ renson-endura-delta==1.7.2
# homeassistant.components.reolink
reolink-aio==0.19.1
# homeassistant.components.radio_frequency
rf-protocols==0.0.1
# homeassistant.components.rflink
rflink==0.0.67
@@ -2481,10 +2488,7 @@ sentence-stream==1.2.0
# homeassistant.components.sentry
sentry-sdk==2.48.0
# homeassistant.components.acer_projector
# homeassistant.components.homeassistant_hardware
# homeassistant.components.serial
# homeassistant.components.usb
# homeassistant.components.zha
serialx==1.2.2
@@ -2674,7 +2678,7 @@ tuya-device-handlers==0.0.17
tuya-device-sharing-sdk==0.2.8
# homeassistant.components.twentemilieu
twentemilieu==3.0.0
twentemilieu==2.2.1
# homeassistant.components.twilio
twilio==6.32.0

View File

@@ -1,6 +1,6 @@
# Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
codespell==2.4.2
codespell==2.4.1
ruff==0.15.1
yamllint==1.38.0
zizmor==1.23.1

View File

@@ -34,23 +34,25 @@ ENV \
UV_SYSTEM_PYTHON=true \
UV_NO_CACHE=true
WORKDIR /usr/src
# Home Assistant S6-Overlay
COPY rootfs /
# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:{go2rtc} /usr/local/bin/go2rtc /bin/go2rtc
RUN \
# Verify go2rtc can be executed
go2rtc --version \
# Install uv
&& pip3 install uv=={uv}
WORKDIR /usr/src
## Setup Home Assistant Core dependencies
COPY requirements.txt homeassistant/
COPY homeassistant/package_constraints.txt homeassistant/homeassistant/
RUN \
# Verify go2rtc can be executed
go2rtc --version \
# Install uv at the version pinned in the requirements file
&& pip3 install --no-cache-dir "uv==$(awk -F'==' '/^uv==/{{print $2}}' homeassistant/requirements.txt)" \
&& uv pip install \
uv pip install \
--no-build \
-r homeassistant/requirements.txt
@@ -141,12 +143,12 @@ WORKDIR "/github/workspace"
COPY . /usr/src/homeassistant
# Uv creates a lock file in /tmp
RUN --mount=type=tmpfs,target=/tmp \
# Uv is only needed during build
RUN --mount=from=ghcr.io/astral-sh/uv:{uv},source=/uv,target=/bin/uv \
# Uv creates a lock file in /tmp
--mount=type=tmpfs,target=/tmp \
# Required for PyTurboJPEG
apk add --no-cache libturbojpeg \
# Install uv at the version pinned in the requirements file
&& pip install --no-cache-dir "uv==$(awk -F'==' '/^uv==/{{print $2}}' /usr/src/homeassistant/requirements.txt)" \
&& uv pip install \
--no-build \
--no-cache \
@@ -215,7 +217,8 @@ def _generate_files(config: Config) -> list[File]:
+ 10
) * 1000
package_versions = _get_package_versions(
package_versions = _get_package_versions(config.root / "requirements.txt", {"uv"})
package_versions |= _get_package_versions(
config.root / "requirements_test.txt", {"pipdeptree", "tqdm"}
)
package_versions |= _get_package_versions(

View File

@@ -13,12 +13,12 @@ WORKDIR "/github/workspace"
COPY . /usr/src/homeassistant
# Uv creates a lock file in /tmp
RUN --mount=type=tmpfs,target=/tmp \
# Uv is only needed during build
RUN --mount=from=ghcr.io/astral-sh/uv:0.11.1,source=/uv,target=/bin/uv \
# Uv creates a lock file in /tmp
--mount=type=tmpfs,target=/tmp \
# Required for PyTurboJPEG
apk add --no-cache libturbojpeg \
# Install uv at the version pinned in the requirements file
&& pip install --no-cache-dir "uv==$(awk -F'==' '/^uv==/{print $2}' /usr/src/homeassistant/requirements.txt)" \
&& uv pip install \
--no-build \
--no-cache \

View File

@@ -252,12 +252,6 @@ FORBIDDEN_PACKAGE_FILES_EXCEPTIONS = {
"coinbase": {"homeassistant": {"coinbase-advanced-py"}},
# https://github.com/u9n/dlms-cosem
"dsmr": {"dsmr-parser": {"dlms-cosem"}},
# https://github.com/tkdrob/pyefergy
# pyefergy declares codecov as a runtime dependency, which pulls in
# coverage; coverage ships an 'a1_coverage.pth' file starting from
# 7.13.x. Upstream fix pending in
# https://github.com/tkdrob/pyefergy/pull/47
"efergy": {"codecov": {"coverage"}},
# https://github.com/ChrisMandich/PyFlume # Fixed with >=0.7.1
"fitbit": {
# Setuptools - distutils-precedence.pth

View File

@@ -41,6 +41,7 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Arcam FMJ (127.0.0.1)',
'last_non_buffering_state': <MediaPlayerState.ON: 'on'>,
'supported_features': <MediaPlayerEntityFeature: 200588>,
'volume_level': 0.0,
}),
@@ -94,6 +95,7 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Arcam FMJ (127.0.0.1) Zone 2',
'last_non_buffering_state': <MediaPlayerState.ON: 'on'>,
'supported_features': <MediaPlayerEntityFeature: 135052>,
'volume_level': 0.0,
}),

View File

@@ -63,6 +63,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': 'playing',
'media_content_type': 'music',
'repeat': 'off',
'shuffle': False,
@@ -185,6 +186,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'media_player.beosound_a5_44444444',
]),
'last_non_buffering_state': 'playing',
'media_content_type': 'music',
'repeat': 'off',
'shuffle': False,

View File

@@ -23,6 +23,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -71,6 +72,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <BeoMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -120,6 +122,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <BeoMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -169,6 +172,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <BeoMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -218,6 +222,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <BeoMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -267,6 +272,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -315,6 +321,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -363,6 +370,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -411,6 +419,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -459,6 +468,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -507,6 +517,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -555,6 +566,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -603,6 +615,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <BeoMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -652,6 +665,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.IDLE: 'idle'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -701,6 +715,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <BeoMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -750,6 +765,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.IDLE: 'idle'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -799,6 +815,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'media_position': 0,
'repeat': <RepeatMode.OFF: 'off'>,
@@ -849,6 +866,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.IDLE: 'idle'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -898,6 +916,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <BeoMediaType.TIDAL: 'tidal'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -947,6 +966,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.IDLE: 'idle'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -996,6 +1016,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -1042,6 +1063,7 @@
'media_player.beoconnect_core_22222222',
'media_player.beosound_balance_11111111',
]),
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,
@@ -1090,6 +1112,7 @@
'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com',
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com',
]),
'last_non_buffering_state': <MediaPlayerState.IDLE: 'idle'>,
'media_content_type': <MediaType.MUSIC: 'music'>,
'repeat': <RepeatMode.OFF: 'off'>,
'shuffle': False,

View File

@@ -5,6 +5,7 @@
'friendly_name': 'player-name1111',
'group_members': None,
'is_volume_muted': False,
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'master': False,
'media_album_name': 'album',
'media_artist': 'artist',

View File

@@ -54,8 +54,6 @@ async def target_climates(hass: HomeAssistant) -> dict[str, list[str]]:
"climate.target_temperature_crossed_threshold",
"climate.turned_off",
"climate.turned_on",
"climate.started_cooling",
"climate.started_drying",
"climate.started_heating",
],
)

View File

@@ -46,6 +46,7 @@
'device_class': 'tv',
'friendly_name': 'Living Room',
'is_volume_muted': False,
'last_non_buffering_state': <MediaPlayerState.ON: 'on'>,
'source_list': list([
'TV',
]),

View File

@@ -0,0 +1,211 @@
"""Test ESPHome radio frequency platform."""
from aioesphomeapi import (
APIClient,
APIConnectionError,
RadioFrequencyCapability,
RadioFrequencyInfo,
RadioFrequencyModulation,
)
import pytest
from rf_protocols import ModulationType, OOKCommand, Timing
from homeassistant.components import radio_frequency
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from .conftest import MockESPHomeDevice, MockESPHomeDeviceType
# Entity ID produced for the mocked RF entity (object_id "rf" on the mock
# ESPHome device) — presumably derived from the device name "test"; shared by
# all tests below. TODO confirm naming against conftest's MockESPHomeDevice.
ENTITY_ID = "radio_frequency.test_rf"
async def _mock_rf_device(
    mock_esphome_device: MockESPHomeDeviceType,
    mock_client: APIClient,
    capabilities: RadioFrequencyCapability = RadioFrequencyCapability.TRANSMITTER,
    frequency_min: int = 433_000_000,
    frequency_max: int = 434_000_000,
    supported_modulations: int = 1,
) -> MockESPHomeDevice:
    """Set up a mock ESPHome device exposing one radio frequency entity.

    The entity uses object_id "rf" and key 1; capability flags and the
    frequency/modulation parameters are forwarded unchanged so tests can
    exercise different hardware descriptions.
    """
    rf_info = RadioFrequencyInfo(
        object_id="rf",
        key=1,
        name="RF",
        capabilities=capabilities,
        frequency_min=frequency_min,
        frequency_max=frequency_max,
        supported_modulations=supported_modulations,
    )
    return await mock_esphome_device(
        mock_client=mock_client, entity_info=[rf_info], states=[]
    )
@pytest.mark.parametrize(
    ("capabilities", "entity_created"),
    [
        (RadioFrequencyCapability.TRANSMITTER, True),
        (RadioFrequencyCapability.RECEIVER, False),
        (
            RadioFrequencyCapability.TRANSMITTER | RadioFrequencyCapability.RECEIVER,
            True,
        ),
        (RadioFrequencyCapability(0), False),
    ],
)
async def test_radio_frequency_entity_transmitter(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
    capabilities: RadioFrequencyCapability,
    entity_created: bool,
) -> None:
    """Test radio frequency entity with transmitter capability is created."""
    await _mock_rf_device(mock_esphome_device, mock_client, capabilities)

    # An entity should exist exactly when the TRANSMITTER capability is set.
    if entity_created:
        assert hass.states.get(ENTITY_ID) is not None
    else:
        assert hass.states.get(ENTITY_ID) is None
async def test_radio_frequency_multiple_entities_mixed_capabilities(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test multiple radio frequency entities with mixed capabilities."""
    specs = [
        ("rf_transmitter", 1, "RF Transmitter", RadioFrequencyCapability.TRANSMITTER),
        ("rf_receiver", 2, "RF Receiver", RadioFrequencyCapability.RECEIVER),
        (
            "rf_transceiver",
            3,
            "RF Transceiver",
            RadioFrequencyCapability.TRANSMITTER | RadioFrequencyCapability.RECEIVER,
        ),
    ]
    entity_info = [
        RadioFrequencyInfo(
            object_id=object_id, key=key, name=name, capabilities=capabilities
        )
        for object_id, key, name, capabilities in specs
    ]
    await mock_esphome_device(
        mock_client=mock_client,
        entity_info=entity_info,
        states=[],
    )
    # Entities are created only for devices with transmitter capability.
    assert hass.states.get("radio_frequency.test_rf_transmitter") is not None
    assert hass.states.get("radio_frequency.test_rf_receiver") is None
    assert hass.states.get("radio_frequency.test_rf_transceiver") is not None
async def test_radio_frequency_send_command_success(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test sending RF command successfully."""
    await _mock_rf_device(mock_esphome_device, mock_client)
    command = OOKCommand(
        frequency=433_920_000,
        timings=[
            Timing(high_us=350, low_us=1050),
            Timing(high_us=350, low_us=350),
        ],
    )
    await radio_frequency.async_send_command(hass, ENTITY_ID, command)
    transmit = mock_client.radio_frequency_transmit_raw_timings
    transmit.assert_called_once()
    args, kwargs = transmit.call_args
    # The first positional argument is the entity key from the device info.
    assert args[0] == 1
    assert kwargs["frequency"] == 433_920_000
    assert kwargs["modulation"] == RadioFrequencyModulation.OOK
    assert kwargs["repeat_count"] == 1
    assert kwargs["device_id"] == 0
    # High durations are positive, low durations negative, flattened in order.
    assert kwargs["timings"] == [350, -1050, 350, -350]
async def test_radio_frequency_send_command_failure(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test sending RF command with APIConnectionError raises HomeAssistantError."""
    await _mock_rf_device(mock_esphome_device, mock_client)
    mock_client.radio_frequency_transmit_raw_timings.side_effect = APIConnectionError(
        "Connection lost"
    )
    command = OOKCommand(
        frequency=433_920_000,
        timings=[Timing(high_us=350, low_us=1050)],
    )
    with pytest.raises(HomeAssistantError) as exc_info:
        await radio_frequency.async_send_command(hass, ENTITY_ID, command)
    err = exc_info.value
    # The API error is translated into a user-facing ESPHome error.
    assert err.translation_domain == "esphome"
    assert err.translation_key == "error_communicating_with_device"
async def test_radio_frequency_entity_availability(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test radio frequency entity becomes available after device reconnects."""
    mock_device = await _mock_rf_device(mock_esphome_device, mock_client)

    def _current_state() -> str:
        state = hass.states.get(ENTITY_ID)
        assert state is not None
        return state.state

    assert _current_state() != STATE_UNAVAILABLE

    await mock_device.mock_disconnect(False)
    await hass.async_block_till_done()
    assert _current_state() == STATE_UNAVAILABLE

    await mock_device.mock_connect()
    await hass.async_block_till_done()
    assert _current_state() != STATE_UNAVAILABLE
async def test_radio_frequency_supported_frequency_ranges(
    hass: HomeAssistant,
    mock_client: APIClient,
    mock_esphome_device: MockESPHomeDeviceType,
) -> None:
    """Test supported frequency ranges are exposed from device info."""
    await _mock_rf_device(
        mock_esphome_device,
        mock_client,
        frequency_min=433_000_000,
        frequency_max=434_000_000,
    )
    # A frequency inside the advertised range matches the transmitter.
    in_range = radio_frequency.async_get_transmitters(
        hass, 433_920_000, ModulationType.OOK
    )
    assert len(in_range) == 1
    # A frequency outside the advertised range matches nothing.
    out_of_range = radio_frequency.async_get_transmitters(
        hass, 868_000_000, ModulationType.OOK
    )
    assert len(out_of_range) == 0

View File

@@ -9,12 +9,12 @@ from homeassistant.core import HomeAssistant
from tests.components.common import (
ConditionStateDescription,
assert_condition_behavior_all,
assert_condition_behavior_any,
assert_condition_gated_by_labs_flag,
create_target_condition,
parametrize_condition_states_all,
parametrize_condition_states_any,
parametrize_target_entities,
set_or_remove_state,
target_entities,
)
@@ -22,7 +22,17 @@ from tests.components.common import (
@pytest.fixture
async def target_fans(hass: HomeAssistant) -> dict[str, list[str]]:
"""Create multiple fan entities associated with different targets."""
return await target_entities(hass, "fan", domain_excluded="switch")
return await target_entities(hass, "fan")
@pytest.fixture
async def target_switches(hass: HomeAssistant) -> dict[str, list[str]]:
"""Create multiple switch entities associated with different targets.
Note: The switches are used to ensure that only fan entities are considered
in the condition evaluation and not other toggle entities.
"""
return await target_entities(hass, "switch")
@pytest.mark.parametrize(
@@ -51,19 +61,18 @@ async def test_fan_conditions_gated_by_labs_flag(
condition="fan.is_on",
target_states=[STATE_ON],
other_states=[STATE_OFF],
excluded_entities_from_other_domain=True,
),
*parametrize_condition_states_any(
condition="fan.is_off",
target_states=[STATE_OFF],
other_states=[STATE_ON],
excluded_entities_from_other_domain=True,
),
],
)
async def test_fan_state_condition_behavior_any(
hass: HomeAssistant,
target_fans: dict[str, list[str]],
target_switches: dict[str, list[str]],
condition_target_config: dict,
entity_id: str,
entities_in_target: int,
@@ -72,17 +81,39 @@ async def test_fan_state_condition_behavior_any(
states: list[ConditionStateDescription],
) -> None:
"""Test the fan state condition with the 'any' behavior."""
await assert_condition_behavior_any(
other_entity_ids = set(target_fans["included_entities"]) - {entity_id}
# Set all fans, including the tested fan, to the initial state
for eid in target_fans["included_entities"]:
set_or_remove_state(hass, eid, states[0]["included_state"])
await hass.async_block_till_done()
condition = await create_target_condition(
hass,
target_entities=target_fans,
condition_target_config=condition_target_config,
entity_id=entity_id,
entities_in_target=entities_in_target,
condition=condition,
condition_options=condition_options,
states=states,
target=condition_target_config,
behavior="any",
)
# Set state for switches to ensure that they don't impact the condition
for state in states:
for eid in target_switches["included_entities"]:
set_or_remove_state(hass, eid, state["included_state"])
await hass.async_block_till_done()
assert condition(hass) is False
for state in states:
included_state = state["included_state"]
set_or_remove_state(hass, entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]
# Check if changing other fans also passes the condition
for other_entity_id in other_entity_ids:
set_or_remove_state(hass, other_entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
@@ -96,13 +127,11 @@ async def test_fan_state_condition_behavior_any(
condition="fan.is_on",
target_states=[STATE_ON],
other_states=[STATE_OFF],
excluded_entities_from_other_domain=True,
),
*parametrize_condition_states_all(
condition="fan.is_off",
target_states=[STATE_OFF],
other_states=[STATE_ON],
excluded_entities_from_other_domain=True,
),
],
)
@@ -117,13 +146,33 @@ async def test_fan_state_condition_behavior_all(
states: list[ConditionStateDescription],
) -> None:
"""Test the fan state condition with the 'all' behavior."""
await assert_condition_behavior_all(
# Set state for two switches to ensure that they don't impact the condition
hass.states.async_set("switch.label_switch_1", STATE_OFF)
hass.states.async_set("switch.label_switch_2", STATE_ON)
other_entity_ids = set(target_fans["included_entities"]) - {entity_id}
# Set all fans, including the tested fan, to the initial state
for eid in target_fans["included_entities"]:
set_or_remove_state(hass, eid, states[0]["included_state"])
await hass.async_block_till_done()
condition = await create_target_condition(
hass,
target_entities=target_fans,
condition_target_config=condition_target_config,
entity_id=entity_id,
entities_in_target=entities_in_target,
condition=condition,
condition_options=condition_options,
states=states,
target=condition_target_config,
behavior="all",
)
for state in states:
included_state = state["included_state"]
set_or_remove_state(hass, entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true_first_entity"]
for other_entity_id in other_entity_ids:
set_or_remove_state(hass, other_entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]

View File

@@ -489,7 +489,7 @@ LIGHT_ATTRS = [
]
LOCK_ATTRS = [{"supported_features": 1}, {}]
NOTIFY_ATTRS = [{"supported_features": 0}, {}]
MEDIA_PLAYER_ATTRS = [{"supported_features": 0}, {}]
MEDIA_PLAYER_ATTRS = [{"supported_features": 0}, {"last_non_buffering_state": "on"}]
SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two"}]
VALVE_ATTRS = [{"supported_features": 0}, {"is_closed": False}]

View File

@@ -1,5 +1,6 @@
"""Tests for the HDMI-CEC media player platform."""
from collections.abc import Callable
from typing import Any
from pycec.const import (
@@ -57,6 +58,39 @@ from homeassistant.core import HomeAssistant
from . import MockHDMIDevice, assert_key_press_release
from .conftest import CecEntityCreator, HDMINetworkCreator
type AssertState = Callable[[str, str], None]
@pytest.fixture(
    name="assert_state",
    params=[
        False,
        pytest.param(
            True,
            marks=pytest.mark.xfail(
                reason="""State isn't updated because the function is missing the
                `schedule_update_ha_state` for a correct push entity. Would still
                update once the data comes back from the device."""
            ),
        ),
    ],
    ids=["skip_assert_state", "run_assert_state"],
)
def assert_state_fixture(request: pytest.FixtureRequest) -> AssertState:
    """Allow for skipping the assert state changes.

    This is broken in this entity, but we still want to test that
    the rest of the code works as expected.
    """

    def _test_state(state: str, expected: str) -> None:
        # Only compare when the parametrization enables state checks; the
        # skip branch is a deliberate no-op (the redundant `assert True`
        # in the original added nothing and has been removed).
        if request.param:
            assert state == expected

    return _test_state
async def test_load_platform(
hass: HomeAssistant,
@@ -108,6 +142,7 @@ async def test_service_on(
hass: HomeAssistant,
create_hdmi_network: HDMINetworkCreator,
create_cec_entity: CecEntityCreator,
assert_state: AssertState,
) -> None:
"""Test that media_player triggers on `on` service."""
hdmi_network = await create_hdmi_network({"platform": "media_player"})
@@ -122,17 +157,19 @@ async def test_service_on(
{ATTR_ENTITY_ID: "media_player.hdmi_3"},
blocking=True,
)
await hass.async_block_till_done()
mock_hdmi_device.turn_on.assert_called_once_with()
state = hass.states.get("media_player.hdmi_3")
assert state.state == STATE_ON
assert_state(state.state, STATE_ON)
async def test_service_off(
hass: HomeAssistant,
create_hdmi_network: HDMINetworkCreator,
create_cec_entity: CecEntityCreator,
assert_state: AssertState,
) -> None:
"""Test that media_player triggers on `off` service."""
hdmi_network = await create_hdmi_network({"platform": "media_player"})
@@ -151,7 +188,7 @@ async def test_service_off(
mock_hdmi_device.turn_off.assert_called_once_with()
state = hass.states.get("media_player.hdmi_3")
assert state.state == STATE_OFF
assert_state(state.state, STATE_OFF)
@pytest.mark.parametrize(
@@ -280,6 +317,7 @@ async def test_volume_services(
data,
blocking=True,
)
await hass.async_block_till_done()
assert mock_hdmi_device.send_command.call_count == 2
assert_key_press_release(mock_hdmi_device.send_command, dst=3, key=key)
@@ -310,6 +348,7 @@ async def test_track_change_services(
{ATTR_ENTITY_ID: "media_player.hdmi_3"},
blocking=True,
)
await hass.async_block_till_done()
assert mock_hdmi_device.send_command.call_count == 2
assert_key_press_release(mock_hdmi_device.send_command, dst=3, key=key)
@@ -334,6 +373,7 @@ async def test_playback_services(
hass: HomeAssistant,
create_hdmi_network: HDMINetworkCreator,
create_cec_entity: CecEntityCreator,
assert_state: AssertState,
service: str,
key: int,
expected_state: str,
@@ -349,12 +389,13 @@ async def test_playback_services(
{ATTR_ENTITY_ID: "media_player.hdmi_3"},
blocking=True,
)
await hass.async_block_till_done()
assert mock_hdmi_device.send_command.call_count == 2
assert_key_press_release(mock_hdmi_device.send_command, dst=3, key=key)
state = hass.states.get("media_player.hdmi_3")
assert state.state == expected_state
assert_state(state.state, expected_state)
@pytest.mark.xfail(reason="PLAY feature isn't enabled")
@@ -362,6 +403,7 @@ async def test_play_pause_service(
hass: HomeAssistant,
create_hdmi_network: HDMINetworkCreator,
create_cec_entity: CecEntityCreator,
assert_state: AssertState,
) -> None:
"""Test play pause service."""
hdmi_network = await create_hdmi_network({"platform": "media_player"})
@@ -376,12 +418,13 @@ async def test_play_pause_service(
{ATTR_ENTITY_ID: "media_player.hdmi_3"},
blocking=True,
)
await hass.async_block_till_done()
assert mock_hdmi_device.send_command.call_count == 2
assert_key_press_release(mock_hdmi_device.send_command, dst=3, key=KEY_PAUSE)
state = hass.states.get("media_player.hdmi_3")
assert state.state == STATE_PAUSED
assert_state(state.state, STATE_PAUSED)
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
@@ -389,6 +432,7 @@ async def test_play_pause_service(
{ATTR_ENTITY_ID: "media_player.hdmi_3"},
blocking=True,
)
await hass.async_block_till_done()
assert mock_hdmi_device.send_command.call_count == 4
assert_key_press_release(mock_hdmi_device.send_command, 1, dst=3, key=KEY_PLAY)
@@ -483,6 +527,9 @@ async def test_starting_state(
assert state.state == expected_state
@pytest.mark.xfail(
reason="The code only sets the state to unavailable, doesn't set the `_attr_available` to false."
)
async def test_unavailable_status(
hass: HomeAssistant,
create_hdmi_network: HDMINetworkCreator,
@@ -494,7 +541,6 @@ async def test_unavailable_status(
await create_cec_entity(hdmi_network, mock_hdmi_device)
hass.bus.async_fire(EVENT_HDMI_CEC_UNAVAILABLE)
await hass.async_block_till_done()
state = hass.states.get("media_player.hdmi_3")
assert state.state == STATE_UNAVAILABLE

View File

@@ -314,6 +314,7 @@
'media_player.test_player_2',
]),
'is_volume_muted': False,
'last_non_buffering_state': 'idle',
'media_album_id': '1',
'media_album_name': 'Album',
'media_artist': 'Artist',

View File

@@ -208,6 +208,7 @@
'media_player.test_player_2',
]),
'is_volume_muted': False,
'last_non_buffering_state': <MediaPlayerState.IDLE: 'idle'>,
'media_album_id': '1',
'media_album_name': 'Album',
'media_artist': 'Artist',

View File

@@ -18319,6 +18319,7 @@
'attributes': dict({
'device_class': 'tv',
'friendly_name': 'LG webOS TV AF80',
'last_non_buffering_state': <MediaPlayerState.ON: 'on'>,
'source': 'HDMI 4',
'source_list': list([
'AirPlay',

View File

@@ -43,6 +43,7 @@
'assumed_state': True,
'device_class': 'tv',
'friendly_name': 'LG TV',
'last_non_buffering_state': <MediaPlayerState.ON: 'on'>,
'supported_features': <MediaPlayerEntityFeature: 21945>,
}),
'context': <ANY>,

View File

@@ -13,9 +13,11 @@ from tests.components.common import (
assert_condition_behavior_all,
assert_condition_behavior_any,
assert_condition_gated_by_labs_flag,
create_target_condition,
parametrize_condition_states_all,
parametrize_condition_states_any,
parametrize_target_entities,
set_or_remove_state,
target_entities,
)
@@ -133,7 +135,17 @@ def parametrize_brightness_condition_states_all(
@pytest.fixture
async def target_lights(hass: HomeAssistant) -> dict[str, list[str]]:
"""Create multiple light entities associated with different targets."""
return await target_entities(hass, "light", domain_excluded="switch")
return await target_entities(hass, "light")
@pytest.fixture
async def target_switches(hass: HomeAssistant) -> dict[str, list[str]]:
"""Create multiple switch entities associated with different targets.
Note: The switches are used to ensure that only light entities are considered
in the condition evaluation and not other toggle entities.
"""
return await target_entities(hass, "switch")
@pytest.mark.parametrize(
@@ -163,19 +175,18 @@ async def test_light_conditions_gated_by_labs_flag(
condition="light.is_on",
target_states=[STATE_ON],
other_states=[STATE_OFF],
excluded_entities_from_other_domain=True,
),
*parametrize_condition_states_any(
condition="light.is_off",
target_states=[STATE_OFF],
other_states=[STATE_ON],
excluded_entities_from_other_domain=True,
),
],
)
async def test_light_state_condition_behavior_any(
hass: HomeAssistant,
target_lights: dict[str, list[str]],
target_switches: dict[str, list[str]],
condition_target_config: dict,
entity_id: str,
entities_in_target: int,
@@ -184,17 +195,39 @@ async def test_light_state_condition_behavior_any(
states: list[ConditionStateDescription],
) -> None:
"""Test the light state condition with the 'any' behavior."""
await assert_condition_behavior_any(
other_entity_ids = set(target_lights["included_entities"]) - {entity_id}
# Set all lights, including the tested light, to the initial state
for eid in target_lights["included_entities"]:
set_or_remove_state(hass, eid, states[0]["included_state"])
await hass.async_block_till_done()
condition = await create_target_condition(
hass,
target_entities=target_lights,
condition_target_config=condition_target_config,
entity_id=entity_id,
entities_in_target=entities_in_target,
condition=condition,
condition_options=condition_options,
states=states,
target=condition_target_config,
behavior="any",
)
# Set state for switches to ensure that they don't impact the condition
for state in states:
for eid in target_switches["included_entities"]:
set_or_remove_state(hass, eid, state["included_state"])
await hass.async_block_till_done()
assert condition(hass) is False
for state in states:
included_state = state["included_state"]
set_or_remove_state(hass, entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]
# Check if changing other lights also passes the condition
for other_entity_id in other_entity_ids:
set_or_remove_state(hass, other_entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
@@ -208,13 +241,11 @@ async def test_light_state_condition_behavior_any(
condition="light.is_on",
target_states=[STATE_ON],
other_states=[STATE_OFF],
excluded_entities_from_other_domain=True,
),
*parametrize_condition_states_all(
condition="light.is_off",
target_states=[STATE_OFF],
other_states=[STATE_ON],
excluded_entities_from_other_domain=True,
),
],
)
@@ -229,17 +260,37 @@ async def test_light_state_condition_behavior_all(
states: list[ConditionStateDescription],
) -> None:
"""Test the light state condition with the 'all' behavior."""
await assert_condition_behavior_all(
# Set state for two switches to ensure that they don't impact the condition
hass.states.async_set("switch.label_switch_1", STATE_OFF)
hass.states.async_set("switch.label_switch_2", STATE_ON)
other_entity_ids = set(target_lights["included_entities"]) - {entity_id}
# Set all lights, including the tested light, to the initial state
for eid in target_lights["included_entities"]:
set_or_remove_state(hass, eid, states[0]["included_state"])
await hass.async_block_till_done()
condition = await create_target_condition(
hass,
target_entities=target_lights,
condition_target_config=condition_target_config,
entity_id=entity_id,
entities_in_target=entities_in_target,
condition=condition,
condition_options=condition_options,
states=states,
target=condition_target_config,
behavior="all",
)
for state in states:
included_state = state["included_state"]
set_or_remove_state(hass, entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true_first_entity"]
for other_entity_id in other_entity_ids:
set_or_remove_state(hass, other_entity_id, included_state)
await hass.async_block_till_done()
assert condition(hass) == state["condition_true"]
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(

View File

@@ -12,10 +12,12 @@ from homeassistant.components.media_player import (
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_FILTER_CLASSES,
ATTR_MEDIA_SEARCH_QUERY,
DOMAIN,
BrowseMedia,
MediaClass,
MediaPlayerEnqueue,
MediaPlayerEntity,
MediaPlayerState,
SearchMedia,
SearchMediaQuery,
)
@@ -24,11 +26,11 @@ from homeassistant.components.media_player.const import (
SERVICE_SEARCH_MEDIA,
)
from homeassistant.components.websocket_api import TYPE_RESULT
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF
from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM, STATE_OFF
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockEntityPlatform
from tests.common import MockEntityPlatform, setup_test_component_platform
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator, WebSocketGenerator
@@ -635,3 +637,62 @@ async def test_play_media_via_selector(hass: HomeAssistant) -> None:
},
blocking=True,
)
async def test_media_player_state(hass: HomeAssistant) -> None:
    """Test that media player state includes last_non_buffering_state."""
    player = MediaPlayerEntity()
    player._attr_name = "test1"
    setup_test_component_platform(hass, DOMAIN, [player])
    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
    await hass.async_block_till_done()

    def _assert_player(expected_state: str, expected_attributes: dict) -> None:
        state = hass.states.get("media_player.test1")
        assert state.state == expected_state
        assert state.attributes == expected_attributes

    _assert_player(
        "unknown",
        {
            "friendly_name": "test1",
            "last_non_buffering_state": None,
            "supported_features": 0,
        },
    )

    player._attr_state = MediaPlayerState.PLAYING
    player.async_write_ha_state()
    _assert_player(
        "playing",
        {
            "friendly_name": "test1",
            "last_non_buffering_state": "playing",
            "supported_features": 0,
        },
    )

    # last_non_buffering_state not updated when state is buffering
    player._attr_state = MediaPlayerState.BUFFERING
    player.async_write_ha_state()
    _assert_player(
        "buffering",
        {
            "friendly_name": "test1",
            "last_non_buffering_state": "playing",
            "supported_features": 0,
        },
    )

    player._attr_state = MediaPlayerState.PAUSED
    player.async_write_ha_state()
    _assert_player(
        "paused",
        {
            "friendly_name": "test1",
            "last_non_buffering_state": "paused",
            "supported_features": 0,
        },
    )

    # last_non_buffering_state not present when unavailable
    player._attr_available = False
    player.async_write_ha_state()
    _assert_player(
        "unavailable",
        {
            "friendly_name": "test1",
            "supported_features": 0,
        },
    )

View File

@@ -28,11 +28,7 @@ async def target_media_players(hass: HomeAssistant) -> dict[str, list[str]]:
@pytest.mark.parametrize(
"trigger_key",
[
"media_player.paused_playing",
"media_player.started_playing",
"media_player.stopped_playing",
"media_player.turned_off",
"media_player.turned_on",
],
)
async def test_media_player_triggers_gated_by_labs_flag(
@@ -50,29 +46,6 @@ async def test_media_player_triggers_gated_by_labs_flag(
@pytest.mark.parametrize(
("trigger", "trigger_options", "states"),
[
*parametrize_trigger_states(
trigger="media_player.paused_playing",
target_states=[
MediaPlayerState.PAUSED,
],
other_states=[
MediaPlayerState.BUFFERING,
MediaPlayerState.PLAYING,
],
),
*parametrize_trigger_states(
trigger="media_player.started_playing",
target_states=[
MediaPlayerState.BUFFERING,
MediaPlayerState.PLAYING,
],
other_states=[
MediaPlayerState.IDLE,
MediaPlayerState.OFF,
MediaPlayerState.ON,
MediaPlayerState.PAUSED,
],
),
*parametrize_trigger_states(
trigger="media_player.stopped_playing",
target_states=[
@@ -86,32 +59,6 @@ async def test_media_player_triggers_gated_by_labs_flag(
MediaPlayerState.PLAYING,
],
),
*parametrize_trigger_states(
trigger="media_player.turned_off",
target_states=[
MediaPlayerState.OFF,
],
other_states=[
MediaPlayerState.BUFFERING,
MediaPlayerState.IDLE,
MediaPlayerState.ON,
MediaPlayerState.PAUSED,
MediaPlayerState.PLAYING,
],
),
*parametrize_trigger_states(
trigger="media_player.turned_on",
target_states=[
MediaPlayerState.BUFFERING,
MediaPlayerState.IDLE,
MediaPlayerState.ON,
MediaPlayerState.PAUSED,
MediaPlayerState.PLAYING,
],
other_states=[
MediaPlayerState.OFF,
],
),
],
)
async def test_media_player_state_trigger_behavior_any(

View File

@@ -2634,7 +2634,7 @@ async def help_test_reload_with_config(
"""Test reloading with supplied config."""
new_yaml_config_file = tmp_path / "configuration.yaml"
def _write_yaml_config() -> str:
def _write_yaml_config() -> None:
new_yaml_config = yaml.dump(config)
new_yaml_config_file.write_text(new_yaml_config)
assert new_yaml_config_file.read_text() == new_yaml_config

View File

@@ -2830,7 +2830,7 @@ async def test_clean_up_registry_monitoring(
}
# Publish it config
# Since it is not enabled_by_default the sensor will not be loaded
# it should register a hook for monitoring the entity registry
# it should register a hook for monitoring the entity registry
async_fire_mqtt_message(
hass,
"homeassistant/sensor/sbfspot_0/sbfspot_12345/config",

View File

@@ -536,6 +536,7 @@ async def test_loading_subentries(
async def test_loading_subentry_with_bad_component_schema(
hass: HomeAssistant,
mqtt_mock_entry: MqttMockHAClientGenerator,
mqtt_config_subentries_data: tuple[dict[str, Any]],
device_registry: dr.DeviceRegistry,
caplog: pytest.LogCaptureFixture,
) -> None:
@@ -564,9 +565,10 @@ async def test_loading_subentry_with_bad_component_schema(
)
],
)
async def test_qos_on_mqtt_device_from_subentry(
async def test_qos_on_mqtt_device_from_subentry(
hass: HomeAssistant,
mqtt_mock_entry: MqttMockHAClientGenerator,
mqtt_config_subentries_data: tuple[dict[str, Any]],
device_registry: dr.DeviceRegistry,
) -> None:
"""Test QoS is set correctly on entities from MQTT device."""

View File

@@ -49,6 +49,7 @@
]),
'icon': 'mdi:speaker',
'is_volume_muted': False,
'last_non_buffering_state': <MediaPlayerState.PLAYING: 'playing'>,
'mass_player_type': 'player',
'media_album_name': 'Test Album',
'media_artist': 'Test Artist',
@@ -121,6 +122,7 @@
]),
'icon': 'mdi:speaker-multiple',
'is_volume_muted': False,
'last_non_buffering_state': <MediaPlayerState.IDLE: 'idle'>,
'mass_player_type': 'group',
'media_album_name': 'Use Your Illusion I',
'media_artist': "Guns N' Roses",
@@ -198,6 +200,7 @@
'group_members': list([
]),
'icon': 'mdi:speaker',
'last_non_buffering_state': <MediaPlayerState.OFF: 'off'>,
'mass_player_type': 'player',
'sound_mode_list': list([
'munich_translation',

Some files were not shown because too many files have changed in this diff Show More