forked from home-assistant/core
Compare commits
28 Commits
2024.4.0b0...2024.4.0b1
| SHA1 |
|---|
| 612988cf3e |
| 7a53ea4b92 |
| 8e4cf4e4a7 |
| 53ba732ed0 |
| c81e9447f9 |
| 8cd8718855 |
| aa30194249 |
| 80273b4873 |
| 99282d27c6 |
| ba12652cbc |
| 5e0a0718e3 |
| 1c6689be41 |
| b143390d88 |
| 42580a1113 |
| 21bff95bd7 |
| 737e5e70ec |
| f204faf202 |
| f141be73c7 |
| 04bfb1de3c |
| 541a6c5f64 |
| 824d6afa24 |
| 53cc4b8c37 |
| a91c03b164 |
| f8edab0c12 |
| 00993a6be3 |
| a18184a4c0 |
| e2710184cb |
| 9319528e0e |
@@ -51,6 +51,30 @@ jobs:
        with:
          ignore-dev: true

      - name: Fail if translations files are checked in
        run: |
          if [ -n "$(find homeassistant/components/*/translations -type f)" ]; then
            echo "Translations files are checked in, please remove the following files:"
            find homeassistant/components/*/translations -type f
            exit 1
          fi

      - name: Download Translations
        run: python3 -m script.translations download
        env:
          LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}

      - name: Archive translations
        shell: bash
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
        uses: actions/upload-artifact@v4.3.1
        with:
          name: translations
          path: translations.tar.gz
          if-no-files-found: error

  build_base:
    name: Build ${{ matrix.arch }} base core image
    if: github.repository_owner == 'home-assistant'
@@ -159,10 +183,15 @@ jobs:
          # are not available.
          sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" requirements_all.txt

      - name: Download Translations
        run: python3 -m script.translations download
        env:
          LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
      - name: Download translations
        uses: actions/download-artifact@v4.1.4
        with:
          name: translations

      - name: Extract translations
        run: |
          tar xvf translations.tar.gz
          rm translations.tar.gz

      - name: Write meta info file
        shell: bash
@@ -186,17 +215,6 @@ jobs:
            --target /data \
            --generic ${{ needs.init.outputs.version }}

      - name: Archive translations
        shell: bash
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
        uses: actions/upload-artifact@v3
        with:
          name: translations
          path: translations.tar.gz
          if-no-files-found: error

  build_machine:
    name: Build ${{ matrix.machine }} machine core image
    if: github.repository_owner == 'home-assistant'
@@ -448,10 +466,15 @@ jobs:
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

      - name: Download Translations
        run: python3 -m script.translations download
        env:
          LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
      - name: Download translations
        uses: actions/download-artifact@v4.1.4
        with:
          name: translations

      - name: Extract translations
        run: |
          tar xvf translations.tar.gz
          rm translations.tar.gz

      - name: Build package
        shell: bash

@@ -140,7 +140,6 @@ class CloudTTSEntity(TextToSpeechEntity):
        """Return a dict include default options."""
        return {
            ATTR_AUDIO_OUTPUT: AudioOutput.MP3,
            ATTR_VOICE: self._voice,
        }

    @property
@@ -178,7 +177,18 @@ class CloudTTSEntity(TextToSpeechEntity):
        gender: Gender | str | None = options.get(ATTR_GENDER)
        gender = handle_deprecated_gender(self.hass, gender)
        original_voice: str | None = options.get(ATTR_VOICE)
        if original_voice is None and language == self._language:
            original_voice = self._voice
        voice = handle_deprecated_voice(self.hass, original_voice)
        if voice not in TTS_VOICES[language]:
            default_voice = TTS_VOICES[language][0]
            _LOGGER.debug(
                "Unsupported voice %s detected, falling back to default %s for %s",
                voice,
                default_voice,
                language,
            )
            voice = default_voice
        # Process TTS
        try:
            data = await self.cloud.voice.process_tts(
@@ -237,7 +247,6 @@ class CloudProvider(Provider):
        """Return a dict include default options."""
        return {
            ATTR_AUDIO_OUTPUT: AudioOutput.MP3,
            ATTR_VOICE: self._voice,
        }

    async def async_get_tts_audio(
@@ -248,7 +257,18 @@ class CloudProvider(Provider):
        gender: Gender | str | None = options.get(ATTR_GENDER)
        gender = handle_deprecated_gender(self.hass, gender)
        original_voice: str | None = options.get(ATTR_VOICE)
        if original_voice is None and language == self._language:
            original_voice = self._voice
        voice = handle_deprecated_voice(self.hass, original_voice)
        if voice not in TTS_VOICES[language]:
            default_voice = TTS_VOICES[language][0]
            _LOGGER.debug(
                "Unsupported voice %s detected, falling back to default %s for %s",
                voice,
                default_voice,
                language,
            )
            voice = default_voice
        # Process TTS
        try:
            data = await self.cloud.voice.process_tts(

@@ -1,22 +0,0 @@
{
  "config": {
    "abort": {
      "already_configured": "Service is already configured"
    },
    "error": {
      "cannot_connect": "Failed to connect"
    },
    "flow_title": "{title}",
    "step": {
      "confirm": {
        "description": "Do you want to set up Devialet device {device}?"
      },
      "user": {
        "data": {
          "host": "Host"
        },
        "description": "Please enter the host name or IP address of the Devialet device."
      }
    }
  }
}
@@ -7,5 +7,5 @@
  "iot_class": "local_push",
  "loggers": ["pyduotecno", "pyduotecno-node", "pyduotecno-unit"],
  "quality_scale": "silver",
  "requirements": ["pyDuotecno==2024.1.2"]
  "requirements": ["pyDuotecno==2024.3.2"]
}

@@ -54,13 +54,14 @@ class Light(CoordinatorEntity[FjaraskupanCoordinator], LightEntity):
        async with self.coordinator.async_connect_and_update() as device:
            if ATTR_BRIGHTNESS in kwargs:
                await device.send_dim(int(kwargs[ATTR_BRIGHTNESS] * (100.0 / 255.0)))
            elif not self.is_on:
                await device.send_command(COMMAND_LIGHT_ON_OFF)
            else:
                await device.send_dim(100)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the entity off."""
        if self.is_on:
            async with self.coordinator.async_connect_and_update() as device:
                await device.send_dim(0)
                await device.send_command(COMMAND_LIGHT_ON_OFF)

    @property

@@ -14,5 +14,5 @@
  "documentation": "https://www.home-assistant.io/integrations/fjaraskupan",
  "iot_class": "local_polling",
  "loggers": ["bleak", "fjaraskupan"],
  "requirements": ["fjaraskupan==2.2.0"]
  "requirements": ["fjaraskupan==2.3.0"]
}

@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20240327.0"]
  "requirements": ["home-assistant-frontend==20240328.0"]
}

@@ -41,7 +41,7 @@ def store_velocity(
    value: float | None,
) -> None:
    """Store the desired shade velocity in the coordinator."""
    coordinator.data.update_shade_position(shade_id, ShadePosition(velocity=value))
    coordinator.data.update_shade_velocity(shade_id, ShadePosition(velocity=value))


NUMBERS: Final = (

@@ -13,14 +13,11 @@ from .util import async_map_data_by_id

_LOGGER = logging.getLogger(__name__)

POSITION_FIELDS = fields(ShadePosition)
POSITION_FIELDS = [field for field in fields(ShadePosition) if field.name != "velocity"]


def copy_position_data(source: ShadePosition, target: ShadePosition) -> ShadePosition:
    """Copy position data from source to target for None values only."""
    # the hub will always return a velocity of 0 on initial connect,
    # separate definition to store consistent value in HA
    # this value is purely driven from HA
    for field in POSITION_FIELDS:
        if (value := getattr(source, field.name)) is not None:
            setattr(target, field.name, value)
@@ -76,3 +73,11 @@ class PowerviewShadeData:
    def update_shade_position(self, shade_id: int, new_position: ShadePosition) -> None:
        """Update a single shades position."""
        copy_position_data(new_position, self.get_shade_position(shade_id))

    def update_shade_velocity(self, shade_id: int, shade_data: ShadePosition) -> None:
        """Update a single shades velocity."""
        # the hub will always return a velocity of 0 on initial connect,
        # separate definition to store consistent value in HA
        # this value is purely driven from HA
        if shade_data.velocity is not None:
            self.get_shade_position(shade_id).velocity = shade_data.velocity

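For context on the hunk above: filtering `velocity` out of the `dataclasses.fields()` list means `copy_position_data` can no longer let the hub's initial `velocity=0` overwrite the HA-driven value. A minimal standalone sketch of the same pattern (the `ShadePos` class here is illustrative, not the real `ShadePosition`):

```python
from dataclasses import dataclass, fields


@dataclass
class ShadePos:
    primary: int | None = None
    secondary: int | None = None
    velocity: int | None = None


# Copy every non-None field except velocity, mirroring the filtered POSITION_FIELDS list.
COPY_FIELDS = [f for f in fields(ShadePos) if f.name != "velocity"]


def copy_position(source: ShadePos, target: ShadePos) -> None:
    for f in COPY_FIELDS:
        if (value := getattr(source, f.name)) is not None:
            setattr(target, f.name, value)


hub_update = ShadePos(primary=50, velocity=0)  # hub reports velocity=0 on connect
stored = ShadePos(primary=10, velocity=80)
copy_position(hub_update, stored)
assert stored.primary == 50 and stored.velocity == 80  # HA-driven velocity preserved
```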
@@ -79,12 +79,12 @@ async def handle_async_init_result(hass: HomeAssistant, domain: str, conf: dict)
    async_create_issue(
        hass,
        DOMAIN,
        f"deprecated_yaml_import_issue_${result['reason']}",
        f"deprecated_yaml_import_issue_{result['reason']}",
        breaks_in_ha_version="2024.8.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key=f"deprecated_yaml_import_issue_${result['reason']}",
        translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
        translation_placeholders=ISSUE_PLACEHOLDER,
    )

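A note on this fix (the same `${...}` → `{...}` correction recurs in several hunks below): in a Python f-string, `$` has no special meaning, so `f"...${expr}"` emits a literal `$` before the interpolated value, producing an issue/translation key that `strings.json` never defines. A quick standalone check of the semantics:

```python
reason = "cannot_connect"

# The old pattern keeps a stray "$" in the generated key:
assert f"issue_${reason}" == "issue_$cannot_connect"

# The corrected pattern produces the key that the translations actually define:
assert f"issue_{reason}" == "issue_cannot_connect"
```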
@@ -313,6 +313,6 @@ DISCOVERY_SCHEMAS = [
            clusters.Thermostat.Attributes.UnoccupiedCoolingSetpoint,
            clusters.Thermostat.Attributes.UnoccupiedHeatingSetpoint,
        ),
        device_type=(device_types.Thermostat,),
        device_type=(device_types.Thermostat, device_types.RoomAirConditioner),
    ),
]

@@ -86,6 +86,7 @@ DISCOVERY_SCHEMAS = [
            device_types.ColorDimmerSwitch,
            device_types.DimmerSwitch,
            device_types.Thermostat,
            device_types.RoomAirConditioner,
        ),
    ),
]

@@ -7,6 +7,7 @@ from pynobo import nobo
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import HomeAssistant
import homeassistant.util.dt as dt_util

from .const import CONF_AUTO_DISCOVERED, CONF_SERIAL, DOMAIN

@@ -19,7 +20,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    serial = entry.data[CONF_SERIAL]
    discover = entry.data[CONF_AUTO_DISCOVERED]
    ip_address = None if discover else entry.data[CONF_IP_ADDRESS]
    hub = nobo(serial=serial, ip=ip_address, discover=discover, synchronous=False)
    hub = nobo(
        serial=serial,
        ip=ip_address,
        discover=discover,
        synchronous=False,
        timezone=dt_util.DEFAULT_TIME_ZONE,
    )
    await hub.connect()

    hass.data.setdefault(DOMAIN, {})

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/nobo_hub",
  "integration_type": "hub",
  "iot_class": "local_push",
  "requirements": ["pynobo==1.6.0"]
  "requirements": ["pynobo==1.8.0"]
}

@@ -110,5 +110,46 @@ MODEL_NAMES = [ # https://ollama.com/library
    "starcoder",
    "phind-codellama",
    "starcoder2",
    "yi",
    "orca2",
    "falcon",
    "wizard-math",
    "dolphin-phi",
    "starling-lm",
    "nous-hermes",
    "stable-code",
    "medllama2",
    "bakllava",
    "codeup",
    "wizardlm-uncensored",
    "solar",
    "everythinglm",
    "sqlcoder",
    "dolphincoder",
    "nous-hermes2-mixtral",
    "stable-beluga",
    "yarn-mistral",
    "stablelm2",
    "samantha-mistral",
    "meditron",
    "stablelm-zephyr",
    "magicoder",
    "yarn-llama2",
    "llama-pro",
    "deepseek-llm",
    "wizard-vicuna",
    "codebooga",
    "mistrallite",
    "all-minilm",
    "nexusraven",
    "open-orca-platypus2",
    "goliath",
    "notux",
    "megadolphin",
    "alfred",
    "xwinlm",
    "wizardlm",
    "duckdb-nsql",
    "notus",
]
DEFAULT_MODEL = "llama2:latest"

@@ -6,7 +6,7 @@
  "dependencies": ["usb"],
  "documentation": "https://www.home-assistant.io/integrations/rainforest_raven",
  "iot_class": "local_polling",
  "requirements": ["aioraven==0.5.2"],
  "requirements": ["aioraven==0.5.3"],
  "usb": [
    {
      "vid": "0403",

@@ -53,6 +53,7 @@ STATISTICS_ROWS_SCHEMA_VERSION = 23
CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
EVENT_TYPE_IDS_SCHEMA_VERSION = 37
STATES_META_SCHEMA_VERSION = 38
LAST_REPORTED_SCHEMA_VERSION = 43

LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28

@@ -47,6 +47,7 @@ from .const import (
    DOMAIN,
    ESTIMATED_QUEUE_ITEM_SIZE,
    KEEPALIVE_TIME,
    LAST_REPORTED_SCHEMA_VERSION,
    LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION,
    MARIADB_PYMYSQL_URL_PREFIX,
    MARIADB_URL_PREFIX,
@@ -1203,7 +1204,7 @@ class Recorder(threading.Thread):
            if (
                pending_last_reported
                := self.states_manager.get_pending_last_reported_timestamp()
            ):
            ) and self.schema_version >= LAST_REPORTED_SCHEMA_VERSION:
                with session.no_autoflush:
                    session.execute(
                        update(States),

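One subtlety in the guard above: with `(pending := get()) and schema_version >= N`, the walrus assignment still runs and binds either way; only the UPDATE is gated, so pending `last_reported` writes are skipped on schemas that lack the column. A minimal illustration of the evaluation order (plain Python, not recorder code):

```python
calls = []


def get_pending():
    calls.append("fetched")
    return {"sensor.x": 1.0}


schema_version = 42  # older than the last_reported schema (43)

if (pending := get_pending()) and schema_version >= 43:
    raise AssertionError("update would run")

assert calls == ["fetched"]  # the walrus assignment always executes
assert pending                # the bound value remains available afterwards
```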
@@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant, State, split_entity_id
import homeassistant.util.dt as dt_util

from ... import recorder
from ..const import LAST_REPORTED_SCHEMA_VERSION
from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States
from ..filters import Filters
from ..models import (
@@ -327,9 +328,10 @@ def _state_changed_during_period_stmt(
    limit: int | None,
    include_start_time_state: bool,
    run_start_ts: float | None,
    include_last_reported: bool,
) -> Select | CompoundSelect:
    stmt = (
        _stmt_and_join_attributes(no_attributes, False, True)
        _stmt_and_join_attributes(no_attributes, False, include_last_reported)
        .filter(
            (
                (States.last_changed_ts == States.last_updated_ts)
@@ -361,22 +363,22 @@ def _state_changed_during_period_stmt(
                single_metadata_id,
                no_attributes,
                False,
                True,
                include_last_reported,
            ).subquery(),
            no_attributes,
            False,
            True,
            include_last_reported,
        ),
        _select_from_subquery(
            stmt.subquery(),
            no_attributes,
            False,
            True,
            include_last_reported,
        ),
    ).subquery(),
    no_attributes,
    False,
    True,
    include_last_reported,
)

@@ -391,6 +393,9 @@ def state_changes_during_period(
    include_start_time_state: bool = True,
) -> MutableMapping[str, list[State]]:
    """Return states changes during UTC period start_time - end_time."""
    has_last_reported = (
        recorder.get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION
    )
    if not entity_id:
        raise ValueError("entity_id must be provided")
    entity_ids = [entity_id.lower()]
@@ -423,12 +428,14 @@ def state_changes_during_period(
                limit,
                include_start_time_state,
                run_start_ts,
                has_last_reported,
            ),
            track_on=[
                bool(end_time_ts),
                no_attributes,
                bool(limit),
                include_start_time_state,
                has_last_reported,
            ],
        )
        return cast(
@@ -475,10 +482,10 @@ def _get_last_state_changes_single_stmt(metadata_id: int) -> Select:


def _get_last_state_changes_multiple_stmt(
    number_of_states: int, metadata_id: int
    number_of_states: int, metadata_id: int, include_last_reported: bool
) -> Select:
    return (
        _stmt_and_join_attributes(False, False, True)
        _stmt_and_join_attributes(False, False, include_last_reported)
        .where(
            States.state_id
            == (
@@ -500,6 +507,9 @@ def get_last_state_changes(
    hass: HomeAssistant, number_of_states: int, entity_id: str
) -> MutableMapping[str, list[State]]:
    """Return the last number_of_states."""
    has_last_reported = (
        recorder.get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION
    )
    entity_id_lower = entity_id.lower()
    entity_ids = [entity_id_lower]

@@ -524,8 +534,9 @@ def get_last_state_changes(
        else:
            stmt = lambda_stmt(
                lambda: _get_last_state_changes_multiple_stmt(
                    number_of_states, metadata_id
                    number_of_states, metadata_id, has_last_reported
                ),
                track_on=[has_last_reported],
            )
        states = list(execute_stmt_lambda_element(session, stmt, orm_rows=False))
        return cast(

@@ -136,6 +136,9 @@ class Searcher:
        # Scripts referencing this area
        self._add(ItemType.SCRIPT, script.scripts_with_area(self.hass, area_id))

        # Entity in this area, will extend this with the entities of the devices in this area
        entity_entries = er.async_entries_for_area(self._entity_registry, area_id)

        # Devices in this area
        for device in dr.async_entries_for_area(self._device_registry, area_id):
            self._add(ItemType.DEVICE, device.id)
@@ -160,10 +163,10 @@ class Searcher:
                # Skip the entity if it's in a different area
                if entity_entry.area_id is not None:
                    continue
                self._add(ItemType.ENTITY, entity_entry.entity_id)
                entity_entries.append(entity_entry)

        # Entities in this area
        for entity_entry in er.async_entries_for_area(self._entity_registry, area_id):
        # Process entities in this area
        for entity_entry in entity_entries:
            self._add(ItemType.ENTITY, entity_entry.entity_id)

            # If this entity also exists as a resource, we add it.

@@ -81,12 +81,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        async_create_issue(
            hass,
            DOMAIN,
            f"deprecated_yaml_import_issue_${result['reason']}",
            f"deprecated_yaml_import_issue_{result['reason']}",
            breaks_in_ha_version="2024.7.0",
            is_fixable=False,
            issue_domain=DOMAIN,
            severity=IssueSeverity.WARNING,
            translation_key=f"deprecated_yaml_import_issue_${result['reason']}",
            translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
            translation_placeholders=ISSUE_PLACEHOLDER,
        )
    return True

@@ -52,7 +52,7 @@
  "issues": {
    "deprecated_yaml_import_issue_cannot_connect": {
      "title": "The Streamlabs water YAML configuration import failed",
      "description": "Configuring Streamlabs water using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to Streamlabs water works and restart Home Assistant to try again or remove the Streamlabs water YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
      "description": "Configuring Streamlabs water using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to Streamlabs water works and restart Home Assistant to try again or remove the Streamlabs water YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
    },
    "deprecated_yaml_import_issue_unknown": {
      "title": "The Streamlabs water YAML configuration import failed",

@@ -74,12 +74,12 @@ async def async_setup_platform(
        async_create_issue(
            hass,
            DOMAIN,
            f"deprecated_yaml_import_issue_${result['reason']}",
            f"deprecated_yaml_import_issue_{result['reason']}",
            breaks_in_ha_version="2024.7.0",
            is_fixable=False,
            issue_domain=DOMAIN,
            severity=IssueSeverity.WARNING,
            translation_key=f"deprecated_yaml_import_issue_${result['reason']}",
            translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
            translation_placeholders=ISSUE_PLACEHOLDER,
        )

@@ -32,7 +32,7 @@
    },
    "deprecated_yaml_import_issue_cannot_connect": {
      "title": "The Suez water YAML configuration import failed",
      "description": "Configuring Suez water using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to Suez water works and restart Home Assistant to try again or remove the Suez water YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
      "description": "Configuring Suez water using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to Suez water works and restart Home Assistant to try again or remove the Suez water YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually."
    },
    "deprecated_yaml_import_issue_unknown": {
      "title": "The Suez water YAML configuration import failed",

@@ -131,12 +131,12 @@ async def async_setup_platform(
        async_create_issue(
            hass,
            DOMAIN,
            f"deprecated_yaml_import_issue_${result['reason']}",
            f"deprecated_yaml_import_issue_{result['reason']}",
            breaks_in_ha_version="2024.7.0",
            is_fixable=False,
            issue_domain=DOMAIN,
            severity=IssueSeverity.WARNING,
            translation_key=f"deprecated_yaml_import_issue_${result['reason']}",
            translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
            translation_placeholders=PLACEHOLDERS,
        )

@@ -38,7 +38,7 @@
  "issues": {
    "deprecated_yaml_import_issue_cannot_connect": {
      "title": "The swiss public transport YAML configuration import cannot connect to server",
      "description": "Configuring swiss public transport using YAML is being removed but there was an connection error importing your YAML configuration.\n\nMake sure your home assistant can reach the [opendata server]({opendata_url}). In case the server is down, try again later."
      "description": "Configuring swiss public transport using YAML is being removed but there was a connection error importing your YAML configuration.\n\nMake sure your home assistant can reach the [opendata server]({opendata_url}). In case the server is down, try again later."
    },
    "deprecated_yaml_import_issue_bad_config": {
      "title": "The swiss public transport YAML configuration import request failed due to bad config",

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["pytile"],
  "requirements": ["pytile==2023.04.0"]
  "requirements": ["pytile==2023.12.0"]
}

@@ -40,7 +40,6 @@ from .const import (
    SERVER_UNAVAILABLE,
    SWITCH_KEY_MAP,
    TRACKABLES,
    TRACKER_ACTIVITY_STATUS_UPDATED,
    TRACKER_HARDWARE_STATUS_UPDATED,
    TRACKER_POSITION_UPDATED,
    TRACKER_SWITCH_STATUS_UPDATED,
@@ -220,9 +219,6 @@ class TractiveClient:
            if server_was_unavailable:
                _LOGGER.debug("Tractive is back online")
                server_was_unavailable = False
            if event["message"] == "activity_update":
                self._send_activity_update(event)
                continue
            if event["message"] == "wellness_overview":
                self._send_wellness_update(event)
                continue
@@ -291,15 +287,6 @@ class TractiveClient:
            TRACKER_SWITCH_STATUS_UPDATED, event["tracker_id"], payload
        )

    def _send_activity_update(self, event: dict[str, Any]) -> None:
        payload = {
            ATTR_MINUTES_ACTIVE: event["progress"]["achieved_minutes"],
            ATTR_DAILY_GOAL: event["progress"]["goal_minutes"],
        }
        self._dispatch_tracker_event(
            TRACKER_ACTIVITY_STATUS_UPDATED, event["pet_id"], payload
        )

    def _send_wellness_update(self, event: dict[str, Any]) -> None:
        sleep_day = None
        sleep_night = None
@@ -309,6 +296,8 @@ class TractiveClient:
        payload = {
            ATTR_ACTIVITY_LABEL: event["wellness"].get("activity_label"),
            ATTR_CALORIES: event["activity"]["calories"],
            ATTR_DAILY_GOAL: event["activity"]["minutes_goal"],
            ATTR_MINUTES_ACTIVE: event["activity"]["minutes_active"],
            ATTR_MINUTES_DAY_SLEEP: sleep_day,
            ATTR_MINUTES_NIGHT_SLEEP: sleep_night,
            ATTR_MINUTES_REST: event["activity"]["minutes_rest"],

|
||||
CLIENT = "client"
|
||||
TRACKABLES = "trackables"
|
||||
|
||||
TRACKER_ACTIVITY_STATUS_UPDATED = f"{DOMAIN}_tracker_activity_updated"
|
||||
TRACKER_HARDWARE_STATUS_UPDATED = f"{DOMAIN}_tracker_hardware_status_updated"
|
||||
TRACKER_POSITION_UPDATED = f"{DOMAIN}_tracker_position_updated"
|
||||
TRACKER_SWITCH_STATUS_UPDATED = f"{DOMAIN}_tracker_switch_updated"
|
||||
|
||||
@@ -37,7 +37,6 @@ from .const import (
    CLIENT,
    DOMAIN,
    TRACKABLES,
    TRACKER_ACTIVITY_STATUS_UPDATED,
    TRACKER_HARDWARE_STATUS_UPDATED,
    TRACKER_WELLNESS_STATUS_UPDATED,
)
@@ -118,7 +117,7 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = (
        key=ATTR_MINUTES_ACTIVE,
        translation_key="activity_time",
        native_unit_of_measurement=UnitOfTime.MINUTES,
        signal_prefix=TRACKER_ACTIVITY_STATUS_UPDATED,
        signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
        state_class=SensorStateClass.TOTAL,
    ),
    TractiveSensorEntityDescription(
@@ -139,7 +138,7 @@ SENSOR_TYPES: tuple[TractiveSensorEntityDescription, ...] = (
        key=ATTR_DAILY_GOAL,
        translation_key="daily_goal",
        native_unit_of_measurement=UnitOfTime.MINUTES,
        signal_prefix=TRACKER_ACTIVITY_STATUS_UPDATED,
        signal_prefix=TRACKER_WELLNESS_STATUS_UPDATED,
    ),
    TractiveSensorEntityDescription(
        key=ATTR_MINUTES_DAY_SLEEP,

@@ -16,7 +16,7 @@ import os
import re
import subprocess
import tempfile
from typing import Any, TypedDict, final
from typing import Any, Final, TypedDict, final

from aiohttp import web
import mutagen
@@ -99,6 +99,13 @@ ATTR_PREFERRED_SAMPLE_CHANNELS = "preferred_sample_channels"
ATTR_MEDIA_PLAYER_ENTITY_ID = "media_player_entity_id"
ATTR_VOICE = "voice"

_DEFAULT_FORMAT = "mp3"
_PREFFERED_FORMAT_OPTIONS: Final[set[str]] = {
    ATTR_PREFERRED_FORMAT,
    ATTR_PREFERRED_SAMPLE_RATE,
    ATTR_PREFERRED_SAMPLE_CHANNELS,
}

CONF_LANG = "language"

SERVICE_CLEAR_CACHE = "clear_cache"
@@ -569,25 +576,23 @@ class SpeechManager:
        ):
            raise HomeAssistantError(f"Language '{language}' not supported")

        options = options or {}
        supported_options = engine_instance.supported_options or []

        # Update default options with provided options
        invalid_opts: list[str] = []
        merged_options = dict(engine_instance.default_options or {})
        merged_options.update(options or {})
        for option_name, option_value in options.items():
            # Only count an option as invalid if it's not a "preferred format"
            # option. These are used as hints to the TTS system if supported,
            # and otherwise as parameters to ffmpeg conversion.
            if (option_name in supported_options) or (
                option_name in _PREFFERED_FORMAT_OPTIONS
            ):
                merged_options[option_name] = option_value
            else:
                invalid_opts.append(option_name)

        supported_options = list(engine_instance.supported_options or [])

        # ATTR_PREFERRED_* options are always "supported" since they're used to
        # convert audio after the TTS has run (if necessary).
        supported_options.extend(
            (
                ATTR_PREFERRED_FORMAT,
                ATTR_PREFERRED_SAMPLE_RATE,
                ATTR_PREFERRED_SAMPLE_CHANNELS,
            )
        )

        invalid_opts = [
            opt_name for opt_name in merged_options if opt_name not in supported_options
        ]
        if invalid_opts:
            raise HomeAssistantError(f"Invalid options found: {invalid_opts}")

@@ -687,10 +692,31 @@ class SpeechManager:

        This method is a coroutine.
        """
        options = options or {}
        options = dict(options or {})
        supported_options = engine_instance.supported_options or []

        # Default to MP3 unless a different format is preferred
        final_extension = options.get(ATTR_PREFERRED_FORMAT, "mp3")
        # Extract preferred format options.
        #
        # These options are used by Assist pipelines, etc. to get a format that
        # the voice satellite will support.
        #
        # The TTS system ideally supports options directly so we won't have
        # to convert with ffmpeg later. If not, we pop the options here and
        # perform the conversation after receiving the audio.
        if ATTR_PREFERRED_FORMAT in supported_options:
            final_extension = options.get(ATTR_PREFERRED_FORMAT, _DEFAULT_FORMAT)
        else:
            final_extension = options.pop(ATTR_PREFERRED_FORMAT, _DEFAULT_FORMAT)

        if ATTR_PREFERRED_SAMPLE_RATE in supported_options:
            sample_rate = options.get(ATTR_PREFERRED_SAMPLE_RATE)
        else:
            sample_rate = options.pop(ATTR_PREFERRED_SAMPLE_RATE, None)

        if ATTR_PREFERRED_SAMPLE_CHANNELS in supported_options:
            sample_channels = options.get(ATTR_PREFERRED_SAMPLE_CHANNELS)
        else:
            sample_channels = options.pop(ATTR_PREFERRED_SAMPLE_CHANNELS, None)

        async def get_tts_data() -> str:
            """Handle data available."""
@@ -716,8 +742,8 @@ class SpeechManager:
            # rate/format/channel count is requested.
            needs_conversion = (
                (final_extension != extension)
                or (ATTR_PREFERRED_SAMPLE_RATE in options)
                or (ATTR_PREFERRED_SAMPLE_CHANNELS in options)
                or (sample_rate is not None)
                or (sample_channels is not None)
            )

            if needs_conversion:
@@ -726,8 +752,8 @@ class SpeechManager:
                    extension,
                    data,
                    to_extension=final_extension,
                    to_sample_rate=options.get(ATTR_PREFERRED_SAMPLE_RATE),
                    to_sample_channels=options.get(ATTR_PREFERRED_SAMPLE_CHANNELS),
                    to_sample_rate=sample_rate,
                    to_sample_channels=sample_channels,
                )

            # Create file infos

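The pop-versus-get split above is the heart of this change: if the engine itself supports a preferred-format option, it stays in `options` and is passed through; otherwise it is popped out so only the later ffmpeg conversion sees it, and the engine never receives an option it would reject. A standalone sketch of that dispatch under the same idea (all names here are illustrative, not the Home Assistant API):

```python
def split_options(
    options: dict, supported: set[str], preferred: set[str]
) -> tuple[dict, dict]:
    """Split caller options into engine options and conversion-only hints."""
    options = dict(options)  # don't mutate the caller's dict
    conversion = {}
    for name in preferred:
        if name not in supported and name in options:
            # Engine can't handle it: remove it and keep it for conversion instead.
            conversion[name] = options.pop(name)
    return options, conversion


engine_opts, convert_opts = split_options(
    {"voice": "JennyNeural", "preferred_sample_rate": 16000},
    supported={"voice"},
    preferred={"preferred_format", "preferred_sample_rate", "preferred_sample_channels"},
)
assert engine_opts == {"voice": "JennyNeural"}
assert convert_opts == {"preferred_sample_rate": 16000}
```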
@@ -261,7 +261,8 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN):
            username=user_input[CONF_USERNAME],
            password=user_input[CONF_PASSWORD],
            verify_ssl=verify_ssl,
            cache_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect_cache")),
            cache_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect")),
            config_dir=Path(self.hass.config.path(STORAGE_DIR, "unifiprotect")),
        )

        errors = {}

@@ -41,7 +41,7 @@
  "iot_class": "local_push",
  "loggers": ["pyunifiprotect", "unifi_discovery"],
  "quality_scale": "platinum",
  "requirements": ["pyunifiprotect==5.0.2", "unifi-discovery==1.1.8"],
  "requirements": ["pyunifiprotect==5.1.2", "unifi-discovery==1.1.8"],
  "ssdp": [
    {
      "manufacturer": "Ubiquiti Networks",

@@ -145,7 +145,8 @@ def async_create_api_client(
        override_connection_host=entry.options.get(CONF_OVERRIDE_CHOST, False),
        ignore_stats=not entry.options.get(CONF_ALL_UPDATES, False),
        ignore_unadopted=False,
        cache_dir=Path(hass.config.path(STORAGE_DIR, "unifiprotect_cache")),
        cache_dir=Path(hass.config.path(STORAGE_DIR, "unifiprotect")),
        config_dir=Path(hass.config.path(STORAGE_DIR, "unifiprotect")),
    )


@@ -2,6 +2,7 @@

from __future__ import annotations

from collections.abc import Mapping
from dataclasses import dataclass
from datetime import datetime, timedelta
from decimal import Decimal, DecimalException, InvalidOperation
@@ -13,6 +14,7 @@ import voluptuous as vol

from homeassistant.components.sensor import (
    ATTR_LAST_RESET,
    DEVICE_CLASS_UNITS,
    RestoreSensor,
    SensorDeviceClass,
    SensorExtraStoredData,
@@ -21,12 +23,12 @@ from homeassistant.components.sensor import (
from homeassistant.components.sensor.recorder import _suggest_report_issue
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    ATTR_DEVICE_CLASS,
    ATTR_UNIT_OF_MEASUREMENT,
    CONF_NAME,
    CONF_UNIQUE_ID,
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
    UnitOfEnergy,
)
from homeassistant.core import Event, HomeAssistant, State, callback
from homeassistant.helpers import (
@@ -47,6 +49,7 @@ from homeassistant.helpers.template import is_number
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import slugify
import homeassistant.util.dt as dt_util
from homeassistant.util.enum import try_parse_enum

from .const import (
    ATTR_CRON_PATTERN,
@@ -97,12 +100,6 @@ ATTR_LAST_PERIOD = "last_period"
ATTR_LAST_VALID_STATE = "last_valid_state"
ATTR_TARIFF = "tariff"

DEVICE_CLASS_MAP = {
    UnitOfEnergy.WATT_HOUR: SensorDeviceClass.ENERGY,
    UnitOfEnergy.KILO_WATT_HOUR: SensorDeviceClass.ENERGY,
}


PRECISION = 3
PAUSED = "paused"
COLLECTING = "collecting"
@@ -313,6 +310,7 @@ class UtilitySensorExtraStoredData(SensorExtraStoredData):
    last_reset: datetime | None
    last_valid_state: Decimal | None
    status: str
    input_device_class: SensorDeviceClass | None

    def as_dict(self) -> dict[str, Any]:
        """Return a dict representation of the utility sensor data."""
@@ -324,6 +322,7 @@ class UtilitySensorExtraStoredData(SensorExtraStoredData):
            str(self.last_valid_state) if self.last_valid_state else None
        )
        data["status"] = self.status
        data["input_device_class"] = str(self.input_device_class)

        return data

@@ -343,6 +342,9 @@ class UtilitySensorExtraStoredData(SensorExtraStoredData):
                else None
            )
            status: str = restored["status"]
            input_device_class = try_parse_enum(
                SensorDeviceClass, restored.get("input_device_class")
            )
        except KeyError:
            # restored is a dict, but does not have all values
            return None
@@ -357,6 +359,7 @@ class UtilitySensorExtraStoredData(SensorExtraStoredData):
            last_reset,
            last_valid_state,
            status,
            input_device_class,
        )


@@ -397,6 +400,7 @@ class UtilityMeterSensor(RestoreSensor):
        self._last_valid_state = None
        self._collecting = None
        self._name = name
        self._input_device_class = None
        self._unit_of_measurement = None
        self._period = meter_type
        if meter_type is not None:
@@ -416,9 +420,10 @@ class UtilityMeterSensor(RestoreSensor):
        self._tariff = tariff
        self._tariff_entity = tariff_entity

    def start(self, unit):
    def start(self, attributes: Mapping[str, Any]) -> None:
        """Initialize unit and state upon source initial update."""
        self._unit_of_measurement = unit
        self._input_device_class = attributes.get(ATTR_DEVICE_CLASS)
        self._unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        self._state = 0
        self.async_write_ha_state()

@@ -482,6 +487,7 @@ class UtilityMeterSensor(RestoreSensor):
        new_state = event.data["new_state"]
        if new_state is None:
            return
        new_state_attributes: Mapping[str, Any] = new_state.attributes or {}

        # First check if the new_state is valid (see discussion in PR #88446)
        if (new_state_val := self._validate_state(new_state)) is None:
@@ -498,7 +504,7 @@ class UtilityMeterSensor(RestoreSensor):
            for sensor in self.hass.data[DATA_UTILITY][self._parent_meter][
                DATA_TARIFF_SENSORS
            ]:
                sensor.start(new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT))
                sensor.start(new_state_attributes)
            if self._unit_of_measurement is None:
                _LOGGER.warning(
                    "Source sensor %s has no unit of measurement. Please %s",
@@ -512,7 +518,8 @@ class UtilityMeterSensor(RestoreSensor):
            # If net_consumption is off, the adjustment must be non-negative
            self._state += adjustment  # type: ignore[operator] # self._state will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line

        self._unit_of_measurement = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        self._input_device_class = new_state_attributes.get(ATTR_DEVICE_CLASS)
        self._unit_of_measurement = new_state_attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        self._last_valid_state = new_state_val
        self.async_write_ha_state()

@@ -600,6 +607,7 @@ class UtilityMeterSensor(RestoreSensor):
        if (last_sensor_data := await self.async_get_last_sensor_data()) is not None:
            # new introduced in 2022.04
            self._state = last_sensor_data.native_value
            self._input_device_class = last_sensor_data.input_device_class
            self._unit_of_measurement = last_sensor_data.native_unit_of_measurement
            self._last_period = last_sensor_data.last_period
            self._last_reset = last_sensor_data.last_reset
@@ -693,7 +701,11 @@ class UtilityMeterSensor(RestoreSensor):
    @property
    def device_class(self):
        """Return the device class of the sensor."""
        return DEVICE_CLASS_MAP.get(self._unit_of_measurement)
        if self._input_device_class is not None:
            return self._input_device_class
        if self._unit_of_measurement in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY]:
            return SensorDeviceClass.ENERGY
        return None

    @property
    def state_class(self):
@@ -744,6 +756,7 @@ class UtilityMeterSensor(RestoreSensor):
            self._last_reset,
            self._last_valid_state,
            PAUSED if self._collecting is None else COLLECTING,
            self._input_device_class,
        )

    async def async_get_last_sensor_data(self) -> UtilitySensorExtraStoredData | None:

@@ -24,7 +24,7 @@
    "bellows==0.38.1",
    "pyserial==3.5",
    "pyserial-asyncio==0.6",
    "zha-quirks==0.0.112",
    "zha-quirks==0.0.113",
    "zigpy-deconz==0.23.1",
    "zigpy==0.63.5",
    "zigpy-xbee==0.20.1",

@@ -18,7 +18,7 @@ from .util.signal_type import SignalType
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2024
MINOR_VERSION: Final = 4
PATCH_VERSION: Final = "0b0"
PATCH_VERSION: Final = "0b1"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)

@@ -615,6 +615,11 @@ class _ScriptRun:

        delay = delay_delta.total_seconds()
        self._changed()
        if not delay:
            # Handle an empty delay
            trace_set_result(delay=delay, done=True)
            return

        trace_set_result(delay=delay, done=False)
        futures, timeout_handle, timeout_future = self._async_futures_with_timeout(
            delay
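The early return above relies on `timedelta.total_seconds()` returning a float that is falsy only at exactly zero, so `if not delay:` short-circuits an empty delay before any futures or timeout handles are created. A quick standalone check of that property:

```python
from datetime import timedelta

assert not timedelta(0).total_seconds()           # 0.0 is falsy -> early return
assert timedelta(milliseconds=1).total_seconds()  # 0.001 is truthy -> normal wait path
```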
@@ -30,7 +30,7 @@ habluetooth==2.4.2
hass-nabucasa==0.79.0
hassil==1.6.1
home-assistant-bluetooth==1.12.0
home-assistant-frontend==20240327.0
home-assistant-frontend==20240328.0
home-assistant-intents==2024.3.27
httpx==0.27.0
ifaddr==0.2.0

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2024.4.0b0"
version = "2024.4.0b1"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -504,8 +504,6 @@ filterwarnings = [
    # https://github.com/eclipse/paho.mqtt.python/issues/653 - >=2.0.0
    # https://github.com/eclipse/paho.mqtt.python/pull/665
    "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:paho.mqtt.client",
    # https://github.com/bachya/pytile/pull/280 - >=2023.10.0
    "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pytile.tile",
    # https://github.com/rytilahti/python-miio/pull/1809 - >=0.6.0.dev0
    "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.protocol",
    "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:miio.miioprotocol",

@@ -350,7 +350,7 @@ aiopyarr==23.4.0
aioqsw==0.3.5

# homeassistant.components.rainforest_raven
aioraven==0.5.2
aioraven==0.5.3

# homeassistant.components.recollect_waste
aiorecollect==2023.09.0
@@ -864,7 +864,7 @@ fivem-api==0.1.2
fixerio==1.0.0a0

# homeassistant.components.fjaraskupan
fjaraskupan==2.2.0
fjaraskupan==2.3.0

# homeassistant.components.flexit_bacnet
flexit_bacnet==2.1.0
@@ -1077,7 +1077,7 @@ hole==0.8.0
holidays==0.45

# homeassistant.components.frontend
home-assistant-frontend==20240327.0
home-assistant-frontend==20240328.0

# homeassistant.components.conversation
home-assistant-intents==2024.3.27
@@ -1648,7 +1648,7 @@ pyCEC==0.5.2
pyControl4==1.1.0

# homeassistant.components.duotecno
pyDuotecno==2024.1.2
pyDuotecno==2024.3.2

# homeassistant.components.electrasmart
pyElectra==1.2.0
@@ -1991,7 +1991,7 @@ pynetgear==0.10.10
pynetio==0.1.9.1

# homeassistant.components.nobo_hub
pynobo==1.6.0
pynobo==1.8.0

# homeassistant.components.nuki
pynuki==1.6.3
@@ -2311,7 +2311,7 @@ python-vlc==3.0.18122
pythonegardia==1.0.52

# homeassistant.components.tile
pytile==2023.04.0
pytile==2023.12.0

# homeassistant.components.tomorrowio
pytomorrowio==0.3.6
@@ -2339,7 +2339,7 @@ pytrydan==0.4.0
pyudev==0.23.2

# homeassistant.components.unifiprotect
pyunifiprotect==5.0.2
pyunifiprotect==5.1.2

# homeassistant.components.uptimerobot
pyuptimerobot==22.2.0
@@ -2931,7 +2931,7 @@ zeroconf==0.131.0
zeversolar==0.3.1

# homeassistant.components.zha
zha-quirks==0.0.112
zha-quirks==0.0.113

# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.12

@@ -323,7 +323,7 @@ aiopyarr==23.4.0
aioqsw==0.3.5

# homeassistant.components.rainforest_raven
aioraven==0.5.2
aioraven==0.5.3

# homeassistant.components.recollect_waste
aiorecollect==2023.09.0
@@ -702,7 +702,7 @@ fitbit==0.3.1
fivem-api==0.1.2

# homeassistant.components.fjaraskupan
fjaraskupan==2.2.0
fjaraskupan==2.3.0

# homeassistant.components.flexit_bacnet
flexit_bacnet==2.1.0
@@ -876,7 +876,7 @@ hole==0.8.0
holidays==0.45

# homeassistant.components.frontend
home-assistant-frontend==20240327.0
home-assistant-frontend==20240328.0

# homeassistant.components.conversation
home-assistant-intents==2024.3.27
@@ -1298,7 +1298,7 @@ pyCEC==0.5.2
pyControl4==1.1.0

# homeassistant.components.duotecno
pyDuotecno==2024.1.2
pyDuotecno==2024.3.2

# homeassistant.components.electrasmart
pyElectra==1.2.0
@@ -1545,7 +1545,7 @@ pymysensors==0.24.0
pynetgear==0.10.10

# homeassistant.components.nobo_hub
pynobo==1.6.0
pynobo==1.8.0

# homeassistant.components.nuki
pynuki==1.6.3
@@ -1781,7 +1781,7 @@ python-technove==1.2.2
python-telegram-bot[socks]==21.0.1

# homeassistant.components.tile
pytile==2023.04.0
pytile==2023.12.0

# homeassistant.components.tomorrowio
pytomorrowio==0.3.6
@@ -1806,7 +1806,7 @@ pytrydan==0.4.0
pyudev==0.23.2

# homeassistant.components.unifiprotect
pyunifiprotect==5.0.2
pyunifiprotect==5.1.2

# homeassistant.components.uptimerobot
pyuptimerobot==22.2.0
@@ -2266,7 +2266,7 @@ zeroconf==0.131.0
zeversolar==0.3.1

# homeassistant.components.zha
zha-quirks==0.0.112
zha-quirks==0.0.113

# homeassistant.components.zha
zigpy-deconz==0.23.1

@@ -168,7 +168,8 @@ def validate_services(config: Config, integration: Integration) -> None:
    # 2. Check if the service has an icon set in icons.json.
    # raise an error if not.,
    for service_name, service_schema in services.items():
        if service_name not in service_icons:
        if integration.core and service_name not in service_icons:
            # This is enforced for Core integrations only
            integration.add_error(
                "services",
                f"Service {service_name} has no icon in icons.json.",

@@ -111,6 +111,7 @@ class MockTTSProvider(tts.Provider):
            tts.Voice("fran_drescher", "Fran Drescher"),
        ]
    }
    _supported_options = ["voice", "age", tts.ATTR_AUDIO_OUTPUT]

    @property
    def default_language(self) -> str:
@@ -130,7 +131,7 @@ class MockTTSProvider(tts.Provider):
    @property
    def supported_options(self) -> list[str]:
        """Return list of supported options like voice, emotions."""
        return ["voice", "age", tts.ATTR_AUDIO_OUTPUT]
        return self._supported_options

    def get_tts_audio(
        self, message: str, language: str, options: dict[str, Any]

@@ -11,7 +11,7 @@ import wave
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components import assist_pipeline, stt, tts
from homeassistant.components import assist_pipeline, media_source, stt, tts
from homeassistant.components.assist_pipeline.const import (
    CONF_DEBUG_RECORDING_DIR,
    DOMAIN,
@@ -19,9 +19,14 @@ from homeassistant.components.assist_pipeline.const import (
from homeassistant.core import Context, HomeAssistant
from homeassistant.setup import async_setup_component

from .conftest import MockSttProvider, MockSttProviderEntity, MockWakeWordEntity
from .conftest import (
    MockSttProvider,
    MockSttProviderEntity,
    MockTTSProvider,
    MockWakeWordEntity,
)

from tests.typing import WebSocketGenerator
from tests.typing import ClientSessionGenerator, WebSocketGenerator

BYTES_ONE_SECOND = 16000 * 2

@@ -729,15 +734,17 @@ def test_pipeline_run_equality(hass: HomeAssistant, init_components) -> None:

async def test_tts_audio_output(
    hass: HomeAssistant,
    mock_stt_provider: MockSttProvider,
    hass_client: ClientSessionGenerator,
    mock_tts_provider: MockTTSProvider,
    init_components,
    pipeline_data: assist_pipeline.pipeline.PipelineData,
    snapshot: SnapshotAssertion,
) -> None:
    """Test using tts_audio_output with wav sets options correctly."""
    client = await hass_client()
    assert await async_setup_component(hass, media_source.DOMAIN, {})

    def event_callback(event):
        pass
    events: list[assist_pipeline.PipelineEvent] = []

    pipeline_store = pipeline_data.pipeline_store
    pipeline_id = pipeline_store.async_get_preferred_item()
@@ -753,7 +760,7 @@ async def test_tts_audio_output(
            pipeline=pipeline,
            start_stage=assist_pipeline.PipelineStage.TTS,
            end_stage=assist_pipeline.PipelineStage.TTS,
            event_callback=event_callback,
            event_callback=events.append,
            tts_audio_output="wav",
        ),
    )
@@ -764,3 +771,87 @@ async def test_tts_audio_output(
    assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_FORMAT) == "wav"
    assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_SAMPLE_RATE) == 16000
    assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS) == 1

    with patch.object(mock_tts_provider, "get_tts_audio") as mock_get_tts_audio:
        await pipeline_input.execute()

        for event in events:
            if event.type == assist_pipeline.PipelineEventType.TTS_END:
                # We must fetch the media URL to trigger the TTS
                assert event.data
                media_id = event.data["tts_output"]["media_id"]
                resolved = await media_source.async_resolve_media(hass, media_id, None)
                await client.get(resolved.url)

    # Ensure that no unsupported options were passed in
    assert mock_get_tts_audio.called
    options = mock_get_tts_audio.call_args_list[0].kwargs["options"]
    extra_options = set(options).difference(mock_tts_provider.supported_options)
    assert len(extra_options) == 0, extra_options


async def test_tts_supports_preferred_format(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    mock_tts_provider: MockTTSProvider,
    init_components,
    pipeline_data: assist_pipeline.pipeline.PipelineData,
    snapshot: SnapshotAssertion,
) -> None:
    """Test that preferred format options are given to the TTS system if supported."""
    client = await hass_client()
    assert await async_setup_component(hass, media_source.DOMAIN, {})

    events: list[assist_pipeline.PipelineEvent] = []

    pipeline_store = pipeline_data.pipeline_store
    pipeline_id = pipeline_store.async_get_preferred_item()
    pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id)

    pipeline_input = assist_pipeline.pipeline.PipelineInput(
        tts_input="This is a test.",
        conversation_id=None,
        device_id=None,
        run=assist_pipeline.pipeline.PipelineRun(
            hass,
            context=Context(),
            pipeline=pipeline,
            start_stage=assist_pipeline.PipelineStage.TTS,
            end_stage=assist_pipeline.PipelineStage.TTS,
            event_callback=events.append,
            tts_audio_output="wav",
        ),
    )
    await pipeline_input.validate()

    # Make the TTS provider support preferred format options
    supported_options = list(mock_tts_provider.supported_options or [])
    supported_options.extend(
        [
            tts.ATTR_PREFERRED_FORMAT,
            tts.ATTR_PREFERRED_SAMPLE_RATE,
            tts.ATTR_PREFERRED_SAMPLE_CHANNELS,
        ]
    )

    with (
        patch.object(mock_tts_provider, "_supported_options", supported_options),
        patch.object(mock_tts_provider, "get_tts_audio") as mock_get_tts_audio,
    ):
        await pipeline_input.execute()

        for event in events:
            if event.type == assist_pipeline.PipelineEventType.TTS_END:
                # We must fetch the media URL to trigger the TTS
                assert event.data
                media_id = event.data["tts_output"]["media_id"]
                resolved = await media_source.async_resolve_media(hass, media_id, None)
                await client.get(resolved.url)

    assert mock_get_tts_audio.called
    options = mock_get_tts_audio.call_args_list[0].kwargs["options"]

    # We should have received preferred format options in get_tts_audio
    assert tts.ATTR_PREFERRED_FORMAT in options
    assert tts.ATTR_PREFERRED_SAMPLE_RATE in options
    assert tts.ATTR_PREFERRED_SAMPLE_CHANNELS in options

@@ -12,10 +12,20 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY
|
||||
from homeassistant.components.cloud import DOMAIN, const, tts
|
||||
from homeassistant.components.tts import DOMAIN as TTS_DOMAIN
|
||||
from homeassistant.components.media_player import (
|
||||
ATTR_MEDIA_CONTENT_ID,
|
||||
DOMAIN as DOMAIN_MP,
|
||||
SERVICE_PLAY_MEDIA,
|
||||
)
|
||||
from homeassistant.components.tts import (
|
||||
ATTR_LANGUAGE,
|
||||
ATTR_MEDIA_PLAYER_ENTITY_ID,
|
||||
ATTR_MESSAGE,
|
||||
DOMAIN as TTS_DOMAIN,
|
||||
)
|
||||
from homeassistant.components.tts.helper import get_engine_instance
|
||||
from homeassistant.config import async_process_ha_core_config
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_registry import EntityRegistry
|
||||
from homeassistant.helpers.issue_registry import IssueRegistry, IssueSeverity
|
||||
@@ -23,6 +33,8 @@ from homeassistant.setup import async_setup_component
|
||||
|
||||
from . import PIPELINE_DATA
|
||||
|
||||
from tests.common import async_mock_service
|
||||
from tests.components.tts.common import get_media_source_url
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
|
||||
@@ -120,13 +132,13 @@ async def test_prefs_default_voice(
|
||||
assert engine is not None
|
||||
# The platform config provider will be overridden by the discovery info provider.
|
||||
assert engine.default_language == "en-US"
|
||||
assert engine.default_options == {"audio_output": "mp3", "voice": "JennyNeural"}
|
||||
assert engine.default_options == {"audio_output": "mp3"}
|
||||
|
||||
await set_cloud_prefs({"tts_default_voice": ("nl-NL", "MaartenNeural")})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert engine.default_language == "nl-NL"
|
||||
assert engine.default_options == {"audio_output": "mp3", "voice": "MaartenNeural"}
|
||||
assert engine.default_options == {"audio_output": "mp3"}
|
||||
|
||||
|
||||
async def test_deprecated_platform_config(
|
||||
@@ -228,11 +240,11 @@ async def test_get_tts_audio(
|
||||
"url": (
|
||||
"http://example.local:8123/api/tts_proxy/"
|
||||
"42f18378fd4393d18c8dd11d03fa9563c1e54491"
|
||||
f"_en-us_5c97d21c48_{expected_url_suffix}.mp3"
|
||||
f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3"
|
||||
),
|
||||
"path": (
|
||||
"/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491"
|
||||
f"_en-us_5c97d21c48_{expected_url_suffix}.mp3"
|
||||
f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3"
|
||||
),
|
||||
}
|
||||
await hass.async_block_till_done()
|
||||
@@ -242,6 +254,7 @@ async def test_get_tts_audio(
|
||||
assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door."
|
||||
assert mock_process_tts.call_args.kwargs["language"] == "en-US"
|
||||
assert mock_process_tts.call_args.kwargs["gender"] is None
|
||||
assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural"
|
||||
assert mock_process_tts.call_args.kwargs["output"] == "mp3"
|
||||
|
||||
|
||||
@@ -280,11 +293,11 @@ async def test_get_tts_audio_logged_out(
        "url": (
            "http://example.local:8123/api/tts_proxy/"
            "42f18378fd4393d18c8dd11d03fa9563c1e54491"
            f"_en-us_5c97d21c48_{expected_url_suffix}.mp3"
            f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3"
        ),
        "path": (
            "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491"
            f"_en-us_5c97d21c48_{expected_url_suffix}.mp3"
            f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3"
        ),
    }
    await hass.async_block_till_done()
@@ -294,6 +307,7 @@ async def test_get_tts_audio_logged_out(
    assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door."
    assert mock_process_tts.call_args.kwargs["language"] == "en-US"
    assert mock_process_tts.call_args.kwargs["gender"] is None
    assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural"
    assert mock_process_tts.call_args.kwargs["output"] == "mp3"

@@ -344,11 +358,11 @@ async def test_tts_entity(
        "url": (
            "http://example.local:8123/api/tts_proxy/"
            "42f18378fd4393d18c8dd11d03fa9563c1e54491"
            f"_en-us_5c97d21c48_{entity_id}.mp3"
            f"_en-us_6e8b81ac47_{entity_id}.mp3"
        ),
        "path": (
            "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491"
            f"_en-us_5c97d21c48_{entity_id}.mp3"
            f"_en-us_6e8b81ac47_{entity_id}.mp3"
        ),
    }
    await hass.async_block_till_done()
@@ -358,6 +372,7 @@ async def test_tts_entity(
    assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door."
    assert mock_process_tts.call_args.kwargs["language"] == "en-US"
    assert mock_process_tts.call_args.kwargs["gender"] is None
    assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural"
    assert mock_process_tts.call_args.kwargs["output"] == "mp3"

    state = hass.states.get(entity_id)
@@ -632,11 +647,11 @@ async def test_deprecated_gender(
        "url": (
            "http://example.local:8123/api/tts_proxy/"
            "42f18378fd4393d18c8dd11d03fa9563c1e54491"
            f"_{language.lower()}_5c97d21c48_{expected_url_suffix}.mp3"
            f"_{language.lower()}_6e8b81ac47_{expected_url_suffix}.mp3"
        ),
        "path": (
            "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491"
            f"_{language.lower()}_5c97d21c48_{expected_url_suffix}.mp3"
            f"_{language.lower()}_6e8b81ac47_{expected_url_suffix}.mp3"
        ),
    }
    await hass.async_block_till_done()
@@ -645,7 +660,7 @@ async def test_deprecated_gender(
    assert mock_process_tts.call_args is not None
    assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door."
    assert mock_process_tts.call_args.kwargs["language"] == language
    assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural"
    assert mock_process_tts.call_args.kwargs["voice"] == "XiaoxiaoNeural"
    assert mock_process_tts.call_args.kwargs["output"] == "mp3"
    issue = issue_registry.async_get_issue("cloud", "deprecated_gender")
    assert issue is None
@@ -662,11 +677,11 @@ async def test_deprecated_gender(
        "url": (
            "http://example.local:8123/api/tts_proxy/"
            "42f18378fd4393d18c8dd11d03fa9563c1e54491"
            f"_{language.lower()}_5dded72256_{expected_url_suffix}.mp3"
            f"_{language.lower()}_dd0e95eb04_{expected_url_suffix}.mp3"
        ),
        "path": (
            "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491"
            f"_{language.lower()}_5dded72256_{expected_url_suffix}.mp3"
            f"_{language.lower()}_dd0e95eb04_{expected_url_suffix}.mp3"
        ),
    }
    await hass.async_block_till_done()
@@ -678,7 +693,7 @@ async def test_deprecated_gender(
    assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door."
    assert mock_process_tts.call_args.kwargs["language"] == language
    assert mock_process_tts.call_args.kwargs["gender"] == gender_option
    assert mock_process_tts.call_args.kwargs["voice"] == "JennyNeural"
    assert mock_process_tts.call_args.kwargs["voice"] == "XiaoxiaoNeural"
    assert mock_process_tts.call_args.kwargs["output"] == "mp3"
    issue = issue_registry.async_get_issue("cloud", issue_id)
    assert issue is not None
@@ -733,3 +748,65 @@ async def test_deprecated_gender(
    }

    assert not issue_registry.async_get_issue(DOMAIN, issue_id)

@pytest.mark.parametrize(
    ("service", "service_data"),
    [
        (
            "speak",
            {
                ATTR_ENTITY_ID: "tts.home_assistant_cloud",
                ATTR_LANGUAGE: "id-ID",
                ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something",
                ATTR_MESSAGE: "There is someone at the door.",
            },
        ),
        (
            "cloud_say",
            {
                ATTR_ENTITY_ID: "media_player.something",
                ATTR_LANGUAGE: "id-ID",
                ATTR_MESSAGE: "There is someone at the door.",
            },
        ),
    ],
)
async def test_tts_services(
    hass: HomeAssistant,
    cloud: MagicMock,
    hass_client: ClientSessionGenerator,
    service: str,
    service_data: dict[str, Any],
) -> None:
    """Test tts services."""
    calls = async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
    mock_process_tts = AsyncMock(return_value=b"")
    cloud.voice.process_tts = mock_process_tts

    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
    await hass.async_block_till_done()
    await cloud.login("test-user", "test-pass")
    client = await hass_client()

    await hass.services.async_call(
        domain=TTS_DOMAIN,
        service=service,
        service_data=service_data,
        blocking=True,
    )

    assert len(calls) == 1

    url = await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID])
    await hass.async_block_till_done()
    response = await client.get(url)
    assert response.status == HTTPStatus.OK
    await hass.async_block_till_done()

    assert mock_process_tts.call_count == 1
    assert mock_process_tts.call_args is not None
    assert mock_process_tts.call_args.kwargs["text"] == "There is someone at the door."
    assert mock_process_tts.call_args.kwargs["language"] == service_data[ATTR_LANGUAGE]
    assert mock_process_tts.call_args.kwargs["voice"] == "GadisNeural"
    assert mock_process_tts.call_args.kwargs["output"] == "mp3"
@@ -0,0 +1,256 @@
{
  "node_id": 36,
  "date_commissioned": "2024-03-27T17:31:23.745932",
  "last_interview": "2024-03-27T17:31:23.745939",
  "interview_version": 6,
  "available": true,
  "is_bridge": false,
  "attributes": {
    "0/29/0": [
      {
        "0": 22,
        "1": 1
      }
    ],
    "0/29/1": [29, 31, 40, 48, 49, 51, 60, 62, 63],
    "0/29/2": [],
    "0/29/3": [1, 2],
    "0/29/65532": 0,
    "0/29/65533": 2,
    "0/29/65528": [],
    "0/29/65529": [],
    "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
    "0/31/0": [
      {
        "254": 5
      },
      {
        "1": 5,
        "2": 2,
        "3": [112233],
        "4": null,
        "254": 6
      }
    ],
    "0/31/1": [],
    "0/31/2": 4,
    "0/31/3": 3,
    "0/31/4": 4,
    "0/31/65532": 0,
    "0/31/65533": 1,
    "0/31/65528": [],
    "0/31/65529": [],
    "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
    "0/40/0": 17,
    "0/40/1": "TEST_VENDOR",
    "0/40/2": 65521,
    "0/40/3": "Room AirConditioner",
    "0/40/4": 32774,
    "0/40/5": "",
    "0/40/6": "**REDACTED**",
    "0/40/7": 0,
    "0/40/8": "TEST_VERSION",
    "0/40/9": 1,
    "0/40/10": "1.0",
    "0/40/11": "20200101",
    "0/40/12": "",
    "0/40/13": "",
    "0/40/14": "",
    "0/40/15": "TEST_SN",
    "0/40/16": false,
    "0/40/18": "E47F334E22A56610",
    "0/40/19": {
      "0": 3,
      "1": 3
    },
    "0/40/65532": 0,
    "0/40/65533": 1,
    "0/40/65528": [],
    "0/40/65529": [],
    "0/40/65531": [
      0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 65528,
      65529, 65531, 65532, 65533
    ],
    "0/48/0": 0,
    "0/48/1": {
      "0": 60,
      "1": 900
    },
    "0/48/2": 0,
    "0/48/3": 0,
    "0/48/4": true,
    "0/48/65532": 0,
    "0/48/65533": 1,
    "0/48/65528": [1, 3, 5],
    "0/48/65529": [0, 2, 4],
    "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
    "0/49/0": 0,
    "0/49/1": null,
    "0/49/2": 0,
    "0/49/3": 0,
    "0/49/4": false,
    "0/49/5": 0,
    "0/49/6": "",
    "0/49/7": 0,
    "0/49/65532": 2,
    "0/49/65533": 1,
    "0/49/65528": [1, 5, 7],
    "0/49/65529": [0, 2, 3, 4, 6, 8],
    "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533],
    "0/51/0": [],
    "0/51/1": 0,
    "0/51/8": false,
    "0/51/65532": 0,
    "0/51/65533": 1,
    "0/51/65528": [],
    "0/51/65529": [],
    "0/51/65531": [0, 1, 8, 65528, 65529, 65531, 65532, 65533],
    "0/60/0": 0,
    "0/60/1": null,
    "0/60/2": null,
    "0/60/65532": 0,
    "0/60/65533": 1,
    "0/60/65528": [],
    "0/60/65529": [0, 1, 2],
    "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
    "0/62/0": [
      {
        "254": 5
      },
      {
        "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRJBgkBwEkCAEwCUEE7pKHHHlljFuw2MAQJFOAzVR5tPPIXOjxHrLr7el8KqThQ6CuCFwdmNztUaIQgBcPZm6QRoEn6OGoFoAG8vB0KTcKNQEoARgkAgE2AwQCBAEYMAQUEvPPXEC80Bhik9ZDF3HK0Jo0RG0wBRQ2kjqIaJL5W4CHyhTHPUFcjBrNmxgwC0BJN+cSZw9fkFlIZGzsfS4WYFxzouEZ6LXLjqJXqwhi6uoQqoEhHPITp6sQ8u1ZF7OuQ35q0tZBwt84ZvAo+i59GA==",
        "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEB0u1A8srBwhdMy9S5+W8C38qv6l9JxhOaVO1E8f3FHDpv6eTSEDWXvUKEOxZcce5cGUF/9tdW2z5M+pwjt2B9jcKNQEpARgkAmAwBBQ2kjqIaJL5W4CHyhTHPUFcjBrNmzAFFJOvH2V2J30vUkl3ZbhqhwBP2wVXGDALQJHZ9heIDcBg2DGc2b18rirq/5aZ2rsyP9BAE1zeTqSYj/pqKyeMS+hCx69jOqh/eAeDpeAzvL7JmKVLB0JLV1sY",
        "254": 6
      }
    ],
    "0/62/1": [
      {
        "1": "BER19ZLOakFRLvKKC9VsWzN+xv5V5yHHBFdX7ip/cNhnzVfnaNLLHKGU/DtcNZtU/YH+8kUcWKYvknk1TCcrG4k=",
        "2": 24582,
        "3": 9865010379846957597,
        "4": 3118002441518404838,
        "5": "",
        "254": 5
      },
      {
        "1": "BJUrvCXfXiwdfapIXt1qCtJoem+s2gZJ2KBDQZcPVP1cAYECu6Fjjz2MhMy6OW8ASGmWuke+YavIzIZWYEd6BJU=",
        "2": 4939,
        "3": 2,
        "4": 36,
        "5": "",
        "254": 6
      }
    ],
    "0/62/2": 5,
    "0/62/3": 2,
    "0/62/4": [
      "FTABAQEkAgE3AycU3rGzlMtTrxYYJgQAus0sJgUAwGVSNwYnFN6xs5TLU68WGCQHASQIATAJQQREdfWSzmpBUS7yigvVbFszfsb+VechxwRXV+4qf3DYZ81X52jSyxyhlPw7XDWbVP2B/vJFHFimL5J5NUwnKxuJNwo1ASkBGCQCYDAEFMurIH6818tAIcTnwEZO5c+1WAH8MAUUy6sgfrzXy0AhxOfARk7lz7VYAfwYMAtAM2db17wMsM+JMtR4c2Iaz8nHLI4mVbsPGILOBujrzguB2C7p8Q9x8Cw0NgJP7hDV52F9j7IfHjO37aXZA4LqqBg=",
      "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEElSu8Jd9eLB19qkhe3WoK0mh6b6zaBknYoENBlw9U/VwBgQK7oWOPPYyEzLo5bwBIaZa6R75hq8jMhlZgR3oElTcKNQEpARgkAmAwBBSTrx9ldid9L1JJd2W4aocAT9sFVzAFFJOvH2V2J30vUkl3ZbhqhwBP2wVXGDALQPMYkhQcsrqT5v1vgN1LXJr9skDJ6nnuG0QWfs8SVODLGjU73iO1aQVq+Ir5et9RTD/4VrfnI63DW9RA0N+qgCkY"
    ],
    "0/62/5": 6,
    "0/62/65532": 0,
    "0/62/65533": 1,
    "0/62/65528": [1, 3, 5, 8],
    "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11],
    "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533],
    "0/63/0": [],
    "0/63/1": [],
    "0/63/2": 4,
    "0/63/3": 3,
    "0/63/65532": 0,
    "0/63/65533": 2,
    "0/63/65528": [2, 5],
    "0/63/65529": [0, 1, 3, 4],
    "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
    "1/3/0": 0,
    "1/3/1": 0,
    "1/3/65532": 0,
    "1/3/65533": 4,
    "1/3/65528": [],
    "1/3/65529": [0, 64],
    "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
    "1/6/0": false,
    "1/6/65532": 0,
    "1/6/65533": 5,
    "1/6/65528": [],
    "1/6/65529": [0, 1, 2],
    "1/6/65531": [0, 65528, 65529, 65531, 65532, 65533],
    "1/29/0": [
      {
        "0": 114,
        "1": 1
      }
    ],
    "1/29/1": [3, 6, 29, 513, 514],
    "1/29/2": [],
    "1/29/3": [2],
    "1/29/65532": 0,
    "1/29/65533": 2,
    "1/29/65528": [],
    "1/29/65529": [],
    "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
    "1/513/0": 2000,
    "1/513/3": 1600,
    "1/513/4": 3200,
    "1/513/5": 1600,
    "1/513/6": 3200,
    "1/513/17": 2600,
    "1/513/18": 2000,
    "1/513/25": 0,
    "1/513/27": 4,
    "1/513/28": 1,
    "1/513/65532": 35,
    "1/513/65533": 6,
    "1/513/65528": [],
    "1/513/65529": [0],
    "1/513/65531": [
      0, 3, 4, 5, 6, 17, 18, 25, 27, 28, 65528, 65529, 65531, 65532, 65533
    ],
    "1/514/0": 0,
    "1/514/1": 2,
    "1/514/2": 0,
    "1/514/3": 0,
    "1/514/4": 3,
    "1/514/5": 0,
    "1/514/6": 0,
    "1/514/9": 1,
    "1/514/10": 0,
    "1/514/65532": 11,
    "1/514/65533": 4,
    "1/514/65528": [],
    "1/514/65529": [],
    "1/514/65531": [
      0, 1, 2, 3, 4, 5, 6, 9, 10, 65528, 65529, 65531, 65532, 65533
    ],
    "2/3/0": 0,
    "2/3/1": 0,
    "2/3/65532": 0,
    "2/3/65533": 4,
    "2/3/65528": [],
    "2/3/65529": [0, 64],
    "2/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
    "2/29/0": [
      {
        "0": 770,
        "1": 1
      }
    ],
    "2/29/1": [3, 29, 1026],
    "2/29/2": [],
    "2/29/3": [],
    "2/29/65532": 0,
    "2/29/65533": 2,
    "2/29/65528": [],
    "2/29/65529": [],
    "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
    "2/1026/0": 0,
    "2/1026/1": -500,
    "2/1026/2": 6000,
    "2/1026/65532": 0,
    "2/1026/65533": 1,
    "2/1026/65528": [],
    "2/1026/65529": [],
    "2/1026/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533]
  },
  "attribute_subscriptions": []
}
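As a rough sketch of how a node dump like the one above is consumed (not part of this diff; the helper name and fixture directory are assumptions inferred from the test code that follows):

import json
from pathlib import Path


def load_node_fixture(fixtures_dir: Path, name: str) -> dict:
    """Parse a serialized Matter node dump such as room-airconditioner.json."""
    return json.loads((fixtures_dir / f"{name}.json").read_text())


# Attribute keys are "endpoint/cluster/attribute" paths; "1/513/0" is the
# Thermostat cluster's LocalTemperature on endpoint 1 (2000 -> 20.00 degrees).
node = load_node_fixture(Path("fixtures/nodes"), "room-airconditioner")  # directory is an assumption
assert node["node_id"] == 36
assert node["attributes"]["1/513/0"] == 2000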
@@ -25,6 +25,16 @@ async def thermostat_fixture(
    return await setup_integration_with_node_fixture(hass, "thermostat", matter_client)


@pytest.fixture(name="room_airconditioner")
async def room_airconditioner(
    hass: HomeAssistant, matter_client: MagicMock
) -> MatterNode:
    """Fixture for a room air conditioner node."""
    return await setup_integration_with_node_fixture(
        hass, "room-airconditioner", matter_client
    )


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_thermostat(
@@ -387,3 +397,18 @@ async def test_thermostat(
        clusters.Thermostat.Enums.SetpointAdjustMode.kCool, -40
    ),
)


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_room_airconditioner(
    hass: HomeAssistant,
    matter_client: MagicMock,
    room_airconditioner: MatterNode,
) -> None:
    """Test if a climate entity is created for a Room Airconditioner device."""
    state = hass.states.get("climate.room_airconditioner")
    assert state
    assert state.attributes["current_temperature"] == 20
    assert state.attributes["min_temp"] == 16
    assert state.attributes["max_temp"] == 32
@@ -0,0 +1,838 @@
"""Models for SQLAlchemy.

This file contains the model definitions for schema version 42.
It is used to test the schema migration logic.
"""

from __future__ import annotations

from collections.abc import Callable
from datetime import datetime, timedelta
import logging
import time
from typing import Any, Self, cast

import ciso8601
from fnv_hash_fast import fnv1a_32
from sqlalchemy import (
    CHAR,
    JSON,
    BigInteger,
    Boolean,
    ColumnElement,
    DateTime,
    Float,
    ForeignKey,
    Identity,
    Index,
    Integer,
    LargeBinary,
    SmallInteger,
    String,
    Text,
    case,
    type_coerce,
)
from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.orm import DeclarativeBase, Mapped, aliased, mapped_column, relationship
from sqlalchemy.types import TypeDecorator

from homeassistant.components.recorder.const import (
    ALL_DOMAIN_EXCLUDE_ATTRS,
    SupportedDialect,
)
from homeassistant.components.recorder.models import (
    StatisticData,
    StatisticDataTimestamp,
    StatisticMetaData,
    bytes_to_ulid_or_none,
    bytes_to_uuid_hex_or_none,
    datetime_to_timestamp_or_none,
    process_timestamp,
    ulid_to_bytes_or_none,
    uuid_hex_to_bytes_or_none,
)
from homeassistant.const import (
    MAX_LENGTH_EVENT_EVENT_TYPE,
    MAX_LENGTH_STATE_ENTITY_ID,
    MAX_LENGTH_STATE_STATE,
)
from homeassistant.core import Context, Event, EventOrigin, State
from homeassistant.helpers.json import JSON_DUMP, json_bytes, json_bytes_strip_null
import homeassistant.util.dt as dt_util
from homeassistant.util.json import (
    JSON_DECODE_EXCEPTIONS,
    json_loads,
    json_loads_object,
)


# SQLAlchemy Schema
class Base(DeclarativeBase):
    """Base class for tables."""


SCHEMA_VERSION = 42

_LOGGER = logging.getLogger(__name__)

TABLE_EVENTS = "events"
TABLE_EVENT_DATA = "event_data"
TABLE_EVENT_TYPES = "event_types"
TABLE_STATES = "states"
TABLE_STATE_ATTRIBUTES = "state_attributes"
TABLE_STATES_META = "states_meta"
TABLE_RECORDER_RUNS = "recorder_runs"
TABLE_SCHEMA_CHANGES = "schema_changes"
TABLE_STATISTICS = "statistics"
TABLE_STATISTICS_META = "statistics_meta"
TABLE_STATISTICS_RUNS = "statistics_runs"
TABLE_STATISTICS_SHORT_TERM = "statistics_short_term"

STATISTICS_TABLES = ("statistics", "statistics_short_term")

MAX_STATE_ATTRS_BYTES = 16384
MAX_EVENT_DATA_BYTES = 32768

PSQL_DIALECT = SupportedDialect.POSTGRESQL

ALL_TABLES = [
    TABLE_STATES,
    TABLE_STATE_ATTRIBUTES,
    TABLE_EVENTS,
    TABLE_EVENT_DATA,
    TABLE_EVENT_TYPES,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
    TABLE_STATES_META,
    TABLE_STATISTICS,
    TABLE_STATISTICS_META,
    TABLE_STATISTICS_RUNS,
    TABLE_STATISTICS_SHORT_TERM,
]

TABLES_TO_CHECK = [
    TABLE_STATES,
    TABLE_EVENTS,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
]

LAST_UPDATED_INDEX_TS = "ix_states_last_updated_ts"
METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts"
EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin"
STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin"
LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id"
LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts"
CONTEXT_ID_BIN_MAX_LENGTH = 16

MYSQL_COLLATE = "utf8mb4_unicode_ci"
MYSQL_DEFAULT_CHARSET = "utf8mb4"
MYSQL_ENGINE = "InnoDB"

_DEFAULT_TABLE_ARGS = {
    "mysql_default_charset": MYSQL_DEFAULT_CHARSET,
    "mysql_collate": MYSQL_COLLATE,
    "mysql_engine": MYSQL_ENGINE,
    "mariadb_default_charset": MYSQL_DEFAULT_CHARSET,
    "mariadb_collate": MYSQL_COLLATE,
    "mariadb_engine": MYSQL_ENGINE,
}


class UnusedDateTime(DateTime):
    """An unused column type that behaves like a datetime."""


class Unused(CHAR):
    """An unused column type that behaves like a string."""


@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite")  # type: ignore[misc,no-untyped-call]
@compiles(Unused, "mysql", "mariadb", "sqlite")  # type: ignore[misc,no-untyped-call]
def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
    """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite."""
    return "CHAR(0)"  # Uses 1 byte on MySQL (no change on sqlite)


@compiles(Unused, "postgresql")  # type: ignore[misc,no-untyped-call]
def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
    """Compile Unused as CHAR(1) on postgresql."""
    return "CHAR(1)"  # Uses 1 byte


class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):
    """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex."""

    def result_processor(self, dialect, coltype):  # type: ignore[no-untyped-def]
        """Offload the datetime parsing to ciso8601."""
        return lambda value: None if value is None else ciso8601.parse_datetime(value)


class NativeLargeBinary(LargeBinary):
    """A faster version of LargeBinary for engines that support python bytes natively."""

    def result_processor(self, dialect, coltype):  # type: ignore[no-untyped-def]
        """No conversion needed for engines that support native bytes."""
        return None


# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32
# for sqlite and postgresql we use a bigint
UINT_32_TYPE = BigInteger().with_variant(
    mysql.INTEGER(unsigned=True),  # type: ignore[no-untyped-call]
    "mysql",
    "mariadb",
)
JSON_VARIANT_CAST = Text().with_variant(
    postgresql.JSON(none_as_null=True),  # type: ignore[no-untyped-call]
    "postgresql",
)
JSONB_VARIANT_CAST = Text().with_variant(
    postgresql.JSONB(none_as_null=True),  # type: ignore[no-untyped-call]
    "postgresql",
)
DATETIME_TYPE = (
    DateTime(timezone=True)
    .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb")  # type: ignore[no-untyped-call]
    .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite")  # type: ignore[no-untyped-call]
)
DOUBLE_TYPE = (
    Float()
    .with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb")  # type: ignore[no-untyped-call]
    .with_variant(oracle.DOUBLE_PRECISION(), "oracle")
    .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
)
UNUSED_LEGACY_COLUMN = Unused(0)
UNUSED_LEGACY_DATETIME_COLUMN = UnusedDateTime(timezone=True)
UNUSED_LEGACY_INTEGER_COLUMN = SmallInteger()
DOUBLE_PRECISION_TYPE_SQL = "DOUBLE PRECISION"
CONTEXT_BINARY_TYPE = LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH).with_variant(
    NativeLargeBinary(CONTEXT_ID_BIN_MAX_LENGTH), "mysql", "mariadb", "sqlite"
)

TIMESTAMP_TYPE = DOUBLE_TYPE


class JSONLiteral(JSON):
    """Teach SA how to literalize json."""

    def literal_processor(self, dialect: Dialect) -> Callable[[Any], str]:
        """Processor to convert a value to JSON."""

        def process(value: Any) -> str:
            """Dump json."""
            return JSON_DUMP(value)

        return process


EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote]
EVENT_ORIGIN_TO_IDX = {origin: idx for idx, origin in enumerate(EVENT_ORIGIN_ORDER)}


class Events(Base):
    """Event history data."""

    __table_args__ = (
        # Used for fetching events at a specific time
        # see logbook
        Index(
            "ix_events_event_type_id_time_fired_ts", "event_type_id", "time_fired_ts"
        ),
        Index(
            EVENTS_CONTEXT_ID_BIN_INDEX,
            "context_id_bin",
            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_EVENTS
    event_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    event_type: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    event_data: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin_idx: Mapped[int | None] = mapped_column(SmallInteger)
    time_fired: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    time_fired_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
    context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    data_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("event_data.data_id"), index=True
    )
    context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    event_type_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("event_types.event_type_id")
    )
    event_data_rel: Mapped[EventData | None] = relationship("EventData")
    event_type_rel: Mapped[EventTypes | None] = relationship("EventTypes")

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.Events("
            f"id={self.event_id}, event_type_id='{self.event_type_id}', "
            f"origin_idx='{self.origin_idx}', time_fired='{self._time_fired_isotime}'"
            f", data_id={self.data_id})>"
        )

    @property
    def _time_fired_isotime(self) -> str | None:
        """Return time_fired as an isotime string."""
        date_time: datetime | None
        if self.time_fired_ts is not None:
            date_time = dt_util.utc_from_timestamp(self.time_fired_ts)
        else:
            date_time = process_timestamp(self.time_fired)
        if date_time is None:
            return None
        return date_time.isoformat(sep=" ", timespec="seconds")

    @staticmethod
    def from_event(event: Event) -> Events:
        """Create an event database object from a native event."""
        return Events(
            event_type=None,
            event_data=None,
            origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
            time_fired=None,
            time_fired_ts=event.time_fired_timestamp,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(event.context.id),
            context_user_id=None,
            context_user_id_bin=uuid_hex_to_bytes_or_none(event.context.user_id),
            context_parent_id=None,
            context_parent_id_bin=ulid_to_bytes_or_none(event.context.parent_id),
        )

    def to_native(self, validate_entity_id: bool = True) -> Event | None:
        """Convert to a native HA Event."""
        context = Context(
            id=bytes_to_ulid_or_none(self.context_id_bin),
            user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin),
            parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin),
        )
        try:
            return Event(
                self.event_type or "",
                json_loads_object(self.event_data) if self.event_data else {},
                EventOrigin(self.origin)
                if self.origin
                else EVENT_ORIGIN_ORDER[self.origin_idx or 0],
                dt_util.utc_from_timestamp(self.time_fired_ts or 0),
                context=context,
            )
        except JSON_DECODE_EXCEPTIONS:
            # When json_loads fails
            _LOGGER.exception("Error converting to event: %s", self)
            return None


class EventData(Base):
    """Event data history."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENT_DATA
    data_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_data: Mapped[str | None] = mapped_column(
        Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb")
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.EventData("
            f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
            ")>"
        )

    @staticmethod
    def shared_data_bytes_from_event(
        event: Event, dialect: SupportedDialect | None
    ) -> bytes:
        """Create shared_data from an event."""
        if dialect == SupportedDialect.POSTGRESQL:
            bytes_result = json_bytes_strip_null(event.data)
        else:
            bytes_result = json_bytes(event.data)
        if len(bytes_result) > MAX_EVENT_DATA_BYTES:
            _LOGGER.warning(
                "Event data for %s exceed maximum size of %s bytes. "
                "This can cause database performance issues; Event data "
                "will not be stored",
                event.event_type,
                MAX_EVENT_DATA_BYTES,
            )
            return b"{}"
        return bytes_result

    @staticmethod
    def hash_shared_data_bytes(shared_data_bytes: bytes) -> int:
        """Return the hash of json encoded shared data."""
        return fnv1a_32(shared_data_bytes)

    def to_native(self) -> dict[str, Any]:
        """Convert to an event data dictionary."""
        shared_data = self.shared_data
        if shared_data is None:
            return {}
        try:
            return cast(dict[str, Any], json_loads(shared_data))
        except JSON_DECODE_EXCEPTIONS:
            _LOGGER.exception("Error converting row to event data: %s", self)
            return {}


class EventTypes(Base):
    """Event type history."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENT_TYPES
    event_type_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    event_type: Mapped[str | None] = mapped_column(
        String(MAX_LENGTH_EVENT_EVENT_TYPE), index=True, unique=True
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.EventTypes("
            f"id={self.event_type_id}, event_type='{self.event_type}'"
            ")>"
        )


class States(Base):
    """State change history."""

    __table_args__ = (
        # Used for fetching the state of entities at a specific time
        # (get_states in history.py)
        Index(METADATA_ID_LAST_UPDATED_INDEX_TS, "metadata_id", "last_updated_ts"),
        Index(
            STATES_CONTEXT_ID_BIN_INDEX,
            "context_id_bin",
            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATES
    state_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    state: Mapped[str | None] = mapped_column(String(MAX_LENGTH_STATE_STATE))
    attributes: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    event_id: Mapped[int | None] = mapped_column(UNUSED_LEGACY_INTEGER_COLUMN)
    last_changed: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_changed_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    last_updated: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_updated_ts: Mapped[float | None] = mapped_column(
        TIMESTAMP_TYPE, default=time.time, index=True
    )
    old_state_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("states.state_id"), index=True
    )
    attributes_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("state_attributes.attributes_id"), index=True
    )
    context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin_idx: Mapped[int | None] = mapped_column(
        SmallInteger
    )  # 0 is local, 1 is remote
    old_state: Mapped[States | None] = relationship("States", remote_side=[state_id])
    state_attributes: Mapped[StateAttributes | None] = relationship("StateAttributes")
    context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    metadata_id: Mapped[int | None] = mapped_column(
        Integer, ForeignKey("states_meta.metadata_id")
    )
    states_meta_rel: Mapped[StatesMeta | None] = relationship("StatesMeta")

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.States(id={self.state_id}, entity_id='{self.entity_id}'"
            f" metadata_id={self.metadata_id},"
            f" state='{self.state}', event_id='{self.event_id}',"
            f" last_updated='{self._last_updated_isotime}',"
            f" old_state_id={self.old_state_id}, attributes_id={self.attributes_id})>"
        )

    @property
    def _last_updated_isotime(self) -> str | None:
        """Return last_updated as an isotime string."""
        date_time: datetime | None
        if self.last_updated_ts is not None:
            date_time = dt_util.utc_from_timestamp(self.last_updated_ts)
        else:
            date_time = process_timestamp(self.last_updated)
        if date_time is None:
            return None
        return date_time.isoformat(sep=" ", timespec="seconds")

    @staticmethod
    def from_event(event: Event) -> States:
        """Create object from a state_changed event."""
        entity_id = event.data["entity_id"]
        state: State | None = event.data.get("new_state")
        dbstate = States(
            entity_id=entity_id,
            attributes=None,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(event.context.id),
            context_user_id=None,
            context_user_id_bin=uuid_hex_to_bytes_or_none(event.context.user_id),
            context_parent_id=None,
            context_parent_id_bin=ulid_to_bytes_or_none(event.context.parent_id),
            origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
            last_updated=None,
            last_changed=None,
        )
        # None state means the state was removed from the state machine
        if state is None:
            dbstate.state = ""
            dbstate.last_updated_ts = event.time_fired_timestamp
            dbstate.last_changed_ts = None
            return dbstate

        dbstate.state = state.state
        dbstate.last_updated_ts = state.last_updated_timestamp
        if state.last_updated == state.last_changed:
            dbstate.last_changed_ts = None
        else:
            dbstate.last_changed_ts = state.last_changed_timestamp

        return dbstate

    def to_native(self, validate_entity_id: bool = True) -> State | None:
        """Convert to an HA state object."""
        context = Context(
            id=bytes_to_ulid_or_none(self.context_id_bin),
            user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin),
            parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin),
        )
        try:
            attrs = json_loads_object(self.attributes) if self.attributes else {}
        except JSON_DECODE_EXCEPTIONS:
            # When json_loads fails
            _LOGGER.exception("Error converting row to state: %s", self)
            return None
        if self.last_changed_ts is None or self.last_changed_ts == self.last_updated_ts:
            last_changed = last_updated = dt_util.utc_from_timestamp(
                self.last_updated_ts or 0
            )
        else:
            last_updated = dt_util.utc_from_timestamp(self.last_updated_ts or 0)
            last_changed = dt_util.utc_from_timestamp(self.last_changed_ts or 0)
        return State(
            self.entity_id or "",
            self.state,  # type: ignore[arg-type]
            # Join the state_attributes table on attributes_id to get the attributes
            # for newer states
            attrs,
            last_changed,
            last_updated,
            context=context,
            validate_entity_id=validate_entity_id,
        )


class StateAttributes(Base):
    """State attribute change history."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATE_ATTRIBUTES
    attributes_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_attrs: Mapped[str | None] = mapped_column(
        Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb")
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StateAttributes(id={self.attributes_id}, hash='{self.hash}',"
            f" attributes='{self.shared_attrs}')>"
        )

    @staticmethod
    def shared_attrs_bytes_from_event(
        event: Event,
        dialect: SupportedDialect | None,
    ) -> bytes:
        """Create shared_attrs from a state_changed event."""
        state: State | None = event.data.get("new_state")
        # None state means the state was removed from the state machine
        if state is None:
            return b"{}"
        if state_info := state.state_info:
            exclude_attrs = {
                *ALL_DOMAIN_EXCLUDE_ATTRS,
                *state_info["unrecorded_attributes"],
            }
        else:
            exclude_attrs = ALL_DOMAIN_EXCLUDE_ATTRS
        encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes
        bytes_result = encoder(
            {k: v for k, v in state.attributes.items() if k not in exclude_attrs}
        )
        if len(bytes_result) > MAX_STATE_ATTRS_BYTES:
            _LOGGER.warning(
                "State attributes for %s exceed maximum size of %s bytes. "
                "This can cause database performance issues; Attributes "
                "will not be stored",
                state.entity_id,
                MAX_STATE_ATTRS_BYTES,
            )
            return b"{}"
        return bytes_result

    @staticmethod
    def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int:
        """Return the hash of json encoded shared attributes."""
        return fnv1a_32(shared_attrs_bytes)

    def to_native(self) -> dict[str, Any]:
        """Convert to a state attributes dictionary."""
        shared_attrs = self.shared_attrs
        if shared_attrs is None:
            return {}
        try:
            return cast(dict[str, Any], json_loads(shared_attrs))
        except JSON_DECODE_EXCEPTIONS:
            # When json_loads fails
            _LOGGER.exception("Error converting row to state attributes: %s", self)
            return {}


class StatesMeta(Base):
    """Metadata for states."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATES_META
    metadata_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    entity_id: Mapped[str | None] = mapped_column(
        String(MAX_LENGTH_STATE_ENTITY_ID), index=True, unique=True
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.StatesMeta("
            f"id={self.metadata_id}, entity_id='{self.entity_id}'"
            ")>"
        )


class StatisticsBase:
    """Statistics base class."""

    id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    created: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    created_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, default=time.time)
    metadata_id: Mapped[int | None] = mapped_column(
        Integer,
        ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
    )
    start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
    mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    min: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    max: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_reset_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    state: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    sum: Mapped[float | None] = mapped_column(DOUBLE_TYPE)

    duration: timedelta

    @classmethod
    def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self:
        """Create object from a statistics with datetime objects."""
        return cls(  # type: ignore[call-arg]
            metadata_id=metadata_id,
            created=None,
            created_ts=time.time(),
            start=None,
            start_ts=dt_util.utc_to_timestamp(stats["start"]),
            mean=stats.get("mean"),
            min=stats.get("min"),
            max=stats.get("max"),
            last_reset=None,
            last_reset_ts=datetime_to_timestamp_or_none(stats.get("last_reset")),
            state=stats.get("state"),
            sum=stats.get("sum"),
        )

    @classmethod
    def from_stats_ts(cls, metadata_id: int, stats: StatisticDataTimestamp) -> Self:
        """Create object from a statistics with timestamps."""
        return cls(  # type: ignore[call-arg]
            metadata_id=metadata_id,
            created=None,
            created_ts=time.time(),
            start=None,
            start_ts=stats["start_ts"],
            mean=stats.get("mean"),
            min=stats.get("min"),
            max=stats.get("max"),
            last_reset=None,
            last_reset_ts=stats.get("last_reset_ts"),
            state=stats.get("state"),
            sum=stats.get("sum"),
        )


class Statistics(Base, StatisticsBase):
    """Long term statistics."""

    duration = timedelta(hours=1)

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
    )
    __tablename__ = TABLE_STATISTICS


class StatisticsShortTerm(Base, StatisticsBase):
    """Short term statistics."""

    duration = timedelta(minutes=5)

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_short_term_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
    )
    __tablename__ = TABLE_STATISTICS_SHORT_TERM


class StatisticsMeta(Base):
    """Statistics meta data."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATISTICS_META
    id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    statistic_id: Mapped[str | None] = mapped_column(
        String(255), index=True, unique=True
    )
    source: Mapped[str | None] = mapped_column(String(32))
    unit_of_measurement: Mapped[str | None] = mapped_column(String(255))
    has_mean: Mapped[bool | None] = mapped_column(Boolean)
    has_sum: Mapped[bool | None] = mapped_column(Boolean)
    name: Mapped[str | None] = mapped_column(String(255))

    @staticmethod
    def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
        """Create object from meta data."""
        return StatisticsMeta(**meta)


class RecorderRuns(Base):
    """Representation of recorder run."""

    __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),)
    __tablename__ = TABLE_RECORDER_RUNS
    run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
    end: Mapped[datetime | None] = mapped_column(DATETIME_TYPE)
    closed_incorrect: Mapped[bool] = mapped_column(Boolean, default=False)
    created: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        end = (
            f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
        )
        return (
            f"<recorder.RecorderRuns(id={self.run_id},"
            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', end={end},"
            f" closed_incorrect={self.closed_incorrect},"
            f" created='{self.created.isoformat(sep=' ', timespec='seconds')}')>"
        )

    def to_native(self, validate_entity_id: bool = True) -> Self:
        """Return self, native format is this model."""
        return self


class SchemaChanges(Base):
    """Representation of schema version changes."""

    __tablename__ = TABLE_SCHEMA_CHANGES
    change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    schema_version: Mapped[int | None] = mapped_column(Integer)
    changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.SchemaChanges("
            f"id={self.change_id}, schema_version={self.schema_version}, "
            f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
            ")>"
        )


class StatisticsRuns(Base):
    """Representation of statistics run."""

    __tablename__ = TABLE_STATISTICS_RUNS
    run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True)
    start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StatisticsRuns(id={self.run_id},"
            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', )>"
        )


EVENT_DATA_JSON = type_coerce(
    EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
)
OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
    Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
)

SHARED_ATTRS_JSON = type_coerce(
    StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)
OLD_FORMAT_ATTRS_JSON = type_coerce(
    States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)

ENTITY_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["entity_id"]
OLD_ENTITY_ID_IN_EVENT: ColumnElement = OLD_FORMAT_EVENT_DATA_JSON["entity_id"]
DEVICE_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["device_id"]
OLD_STATE = aliased(States, name="old_state")

SHARED_ATTR_OR_LEGACY_ATTRIBUTES = case(
    (StateAttributes.shared_attrs.is_(None), States.attributes),
    else_=StateAttributes.shared_attrs,
).label("attributes")
SHARED_DATA_OR_LEGACY_EVENT_DATA = case(
    (EventData.shared_data.is_(None), Events.event_data), else_=EventData.shared_data
).label("event_data")
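A brief illustration of how the dedup hash defined above is meant to be used (a sketch, not code from this diff): fnv1a_32 only narrows down candidate rows, so a caller must still compare the actual JSON bytes, because a 32-bit hash can collide.

from fnv_hash_fast import fnv1a_32

from homeassistant.helpers.json import json_bytes

# Hypothetical payload; for a non-PostgreSQL dialect,
# EventData.shared_data_bytes_from_event(event, dialect) yields these bytes.
shared = json_bytes({"entity_id": "light.kitchen"})
data_hash = fnv1a_32(shared)  # same as EventData.hash_shared_data_bytes(shared)

# Any lookup keyed by the hash must verify the bytes before reusing a row.
rows_by_hash: dict[int, list[bytes]] = {data_hash: [shared]}
matches = [row for row in rows_by_hash.get(data_hash, []) if row == shared]
assert matches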
File diff suppressed because it is too large
@@ -496,11 +496,14 @@ async def test_search(
        ItemType.SCRIPT: {script_scene_entity.entity_id, "script.nested"},
    }
    assert search(ItemType.AREA, living_room_area.id) == {
        ItemType.AUTOMATION: {"automation.wled_device"},
        ItemType.AUTOMATION: {"automation.wled_device", "automation.wled_entity"},
        ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id},
        ItemType.DEVICE: {wled_device.id},
        ItemType.ENTITY: {wled_segment_1_entity.entity_id},
        ItemType.FLOOR: {first_floor.floor_id},
        ItemType.GROUP: {"group.wled", "group.wled_hue"},
        ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id},
        ItemType.SCRIPT: {"script.wled"},
    }
    assert search(ItemType.AREA, kitchen_area.id) == {
        ItemType.AUTOMATION: {"automation.area"},
@@ -511,7 +514,9 @@ async def test_search(
            hue_segment_2_entity.entity_id,
        },
        ItemType.FLOOR: {first_floor.floor_id},
        ItemType.SCRIPT: {"script.area", "script.device"},
        ItemType.GROUP: {"group.hue", "group.wled_hue"},
        ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id},
        ItemType.SCRIPT: {"script.area", "script.device", "script.hue"},
    }

    assert not search(ItemType.AUTOMATION, "automation.unknown")
@@ -726,6 +731,7 @@ async def test_search(
            "automation.area",
            "automation.floor",
            "automation.wled_device",
            "automation.wled_entity",
        },
        ItemType.CONFIG_ENTRY: {hue_config_entry.entry_id, wled_config_entry.entry_id},
        ItemType.DEVICE: {hue_device.id, wled_device.id},
@@ -734,7 +740,19 @@ async def test_search(
            hue_segment_1_entity.entity_id,
            hue_segment_2_entity.entity_id,
        },
        ItemType.SCRIPT: {"script.device", "script.area", "script.floor"},
        ItemType.GROUP: {"group.hue", "group.wled", "group.wled_hue"},
        ItemType.SCENE: {
            "scene.scene_hue_seg_1",
            "scene.scene_wled_seg_1",
            scene_wled_hue_entity.entity_id,
        },
        ItemType.SCRIPT: {
            "script.device",
            "script.area",
            "script.floor",
            "script.hue",
            "script.wled",
        },
    }
    assert search(ItemType.FLOOR, second_floor.floor_id) == {
        ItemType.AREA: {bedroom_area.id},
@@ -40,6 +40,7 @@ from homeassistant.const import (
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
UnitOfEnergy,
|
||||
UnitOfVolume,
|
||||
)
|
||||
from homeassistant.core import CoreState, HomeAssistant, State
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
@@ -553,8 +554,66 @@ async def test_entity_name(hass: HomeAssistant, yaml_config, entity_id, name) ->
|
||||
),
|
||||
],
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
(
|
||||
"energy_sensor_attributes",
|
||||
"gas_sensor_attributes",
|
||||
"energy_meter_attributes",
|
||||
"gas_meter_attributes",
|
||||
),
|
||||
[
|
||||
(
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR},
|
||||
{ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit"},
|
||||
{
|
||||
ATTR_DEVICE_CLASS: SensorDeviceClass.ENERGY,
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR,
|
||||
},
|
||||
{
|
||||
ATTR_DEVICE_CLASS: None,
|
||||
ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit",
|
||||
},
|
||||
),
|
||||
(
|
||||
{},
|
||||
{},
|
||||
{
|
||||
ATTR_DEVICE_CLASS: None,
|
||||
ATTR_UNIT_OF_MEASUREMENT: None,
|
||||
},
|
||||
{
|
||||
ATTR_DEVICE_CLASS: None,
|
||||
ATTR_UNIT_OF_MEASUREMENT: None,
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
ATTR_DEVICE_CLASS: SensorDeviceClass.GAS,
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR,
|
||||
},
|
||||
{
|
||||
ATTR_DEVICE_CLASS: SensorDeviceClass.WATER,
|
||||
ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit",
|
||||
},
|
||||
{
|
||||
ATTR_DEVICE_CLASS: SensorDeviceClass.GAS,
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR,
|
||||
},
|
||||
{
|
||||
ATTR_DEVICE_CLASS: SensorDeviceClass.WATER,
|
||||
ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit",
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_device_class(
|
||||
hass: HomeAssistant, yaml_config, config_entry_configs
|
||||
hass: HomeAssistant,
|
||||
yaml_config,
|
||||
config_entry_configs,
|
||||
energy_sensor_attributes,
|
||||
gas_sensor_attributes,
|
||||
energy_meter_attributes,
|
||||
gas_meter_attributes,
|
||||
) -> None:
|
||||
"""Test utility device_class."""
|
||||
if yaml_config:
|
||||
@@ -579,27 +638,23 @@ async def test_device_class(
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.states.async_set(
|
||||
entity_id_energy, 2, {ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR}
|
||||
)
|
||||
hass.states.async_set(
|
||||
entity_id_gas, 2, {ATTR_UNIT_OF_MEASUREMENT: "some_archaic_unit"}
|
||||
)
|
||||
hass.states.async_set(entity_id_energy, 2, energy_sensor_attributes)
|
||||
hass.states.async_set(entity_id_gas, 2, gas_sensor_attributes)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
state = hass.states.get("sensor.energy_meter")
|
||||
assert state is not None
|
||||
assert state.state == "0"
|
||||
assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY
|
||||
assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR
|
||||
for attr, value in energy_meter_attributes.items():
|
||||
assert state.attributes.get(attr) == value
|
||||
|
||||
state = hass.states.get("sensor.gas_meter")
|
||||
assert state is not None
|
||||
assert state.state == "0"
|
||||
assert state.attributes.get(ATTR_DEVICE_CLASS) is None
|
||||
assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL_INCREASING
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "some_archaic_unit"
|
||||
for attr, value in gas_meter_attributes.items():
|
||||
assert state.attributes.get(attr) == value
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -610,7 +665,13 @@ async def test_device_class(
|
||||
"utility_meter": {
|
||||
"energy_bill": {
|
||||
"source": "sensor.energy",
|
||||
"tariffs": ["onpeak", "midpeak", "offpeak", "superpeak"],
|
||||
"tariffs": [
|
||||
"tariff0",
|
||||
"tariff1",
|
||||
"tariff2",
|
||||
"tariff3",
|
||||
"tariff4",
|
||||
],
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -626,7 +687,13 @@ async def test_device_class(
|
||||
"offset": 0,
|
||||
"periodically_resetting": True,
|
||||
"source": "sensor.energy",
|
||||
"tariffs": ["onpeak", "midpeak", "offpeak", "superpeak"],
|
||||
"tariffs": [
|
||||
"tariff0",
|
||||
"tariff1",
|
||||
"tariff2",
|
||||
"tariff3",
|
||||
"tariff4",
|
||||
],
|
||||
},
|
||||
),
|
||||
],
|
||||
@@ -638,82 +705,115 @@ async def test_restore_state(
|
||||
# Home assistant is not runnit yet
|
||||
hass.set_state(CoreState.not_running)
|
||||

    last_reset = "2020-12-21T00:00:00.013073+00:00"
    last_reset_1 = "2020-12-21T00:00:00.013073+00:00"
    last_reset_2 = "2020-12-22T00:00:00.013073+00:00"

    mock_restore_cache_with_extra_data(
        hass,
        [
            # sensor.energy_bill_tariff0 is restored as expected, including device
            # class
            (
                State(
                    "sensor.energy_bill_onpeak",
                    "3",
                    "sensor.energy_bill_tariff0",
                    "0.1",
                    attributes={
                        ATTR_STATUS: PAUSED,
                        ATTR_LAST_RESET: last_reset,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR,
                        ATTR_LAST_RESET: last_reset_1,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfVolume.CUBIC_METERS,
                    },
                ),
                {
                    "native_value": {
                        "__type": "<class 'decimal.Decimal'>",
                        "decimal_str": "3",
                        "decimal_str": "0.2",
                    },
                    "native_unit_of_measurement": "gal",
                    "last_reset": last_reset_2,
                    "last_period": "1.3",
                    "last_valid_state": None,
                    "status": "collecting",
                    "input_device_class": "water",
                },
            ),
            # sensor.energy_bill_tariff1 is restored as expected, except device
            # class
            (
                State(
                    "sensor.energy_bill_tariff1",
                    "1.1",
                    attributes={
                        ATTR_STATUS: PAUSED,
                        ATTR_LAST_RESET: last_reset_1,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR,
                    },
                ),
                {
                    "native_value": {
                        "__type": "<class 'decimal.Decimal'>",
                        "decimal_str": "1.2",
                    },
                    "native_unit_of_measurement": "kWh",
                    "last_reset": last_reset,
                    "last_period": "7",
                    "last_valid_state": "None",
                    "last_reset": last_reset_2,
                    "last_period": "1.3",
                    "last_valid_state": None,
                    "status": "paused",
                },
            ),
            # sensor.energy_bill_tariff2 has missing keys and falls back to
            # saved state
            (
                State(
                    "sensor.energy_bill_midpeak",
                    "5",
                    "sensor.energy_bill_tariff2",
                    "2.1",
                    attributes={
                        ATTR_STATUS: PAUSED,
                        ATTR_LAST_RESET: last_reset,
                        ATTR_LAST_RESET: last_reset_1,
                        ATTR_LAST_VALID_STATE: None,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR,
                    },
                ),
                {
                    "native_value": {
                        "__type": "<class 'decimal.Decimal'>",
                        "decimal_str": "3",
                        "decimal_str": "2.2",
                    },
                    "native_unit_of_measurement": "kWh",
                    "last_valid_state": "None",
                },
            ),
            # sensor.energy_bill_tariff3 has invalid data and falls back to
            # saved state
            (
                State(
                    "sensor.energy_bill_offpeak",
                    "6",
                    "sensor.energy_bill_tariff3",
                    "3.1",
                    attributes={
                        ATTR_STATUS: COLLECTING,
                        ATTR_LAST_RESET: last_reset,
                        ATTR_LAST_RESET: last_reset_1,
                        ATTR_LAST_VALID_STATE: None,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR,
                    },
                ),
                {
                    "native_value": {
                        "__type": "<class 'decimal.Decimal'>",
                        "decimal_str": "3f",
                        "decimal_str": "3f",  # Invalid
                    },
                    "native_unit_of_measurement": "kWh",
                    "last_valid_state": "None",
                },
            ),
            # No extra saved data, fall back to saved state
            (
                State(
                    "sensor.energy_bill_superpeak",
                    "sensor.energy_bill_tariff4",
                    "error",
                    attributes={
                        ATTR_STATUS: COLLECTING,
                        ATTR_LAST_RESET: last_reset,
                        ATTR_LAST_RESET: last_reset_1,
                        ATTR_LAST_VALID_STATE: None,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR,
                        ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR,
                    },
                ),
                {},
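To summarize what the fixture above exercises: each entry pairs a sensor's last State with the extra data it stored, and the restored value should come from the extra data when its decimal string parses, falling back to the plain state otherwise. A hedged, self-contained sketch of that fallback rule (restored_value is a hypothetical helper, not the integration's code, and it skips the further step where a non-numeric saved state such as "error" becomes STATE_UNKNOWN):

from decimal import Decimal, InvalidOperation

def restored_value(extra: dict | None, saved_state: str) -> str:
    # Prefer the extra stored data when its decimal string is parsable.
    if extra:
        try:
            return str(Decimal(extra["native_value"]["decimal_str"]))
        except (KeyError, InvalidOperation):
            pass
    # Otherwise fall back to the last saved state string.
    return saved_state

assert restored_value({"native_value": {"decimal_str": "0.2"}}, "0.1") == "0.2"
assert restored_value({"native_value": {"decimal_str": "3f"}}, "3.1") == "3.1"
assert restored_value({}, "error") == "error"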
@@ -736,41 +836,60 @@ async def test_restore_state(
    await hass.async_block_till_done()

    # restore from cache
    state = hass.states.get("sensor.energy_bill_onpeak")
    assert state.state == "3"
    assert state.attributes.get("status") == PAUSED
    assert state.attributes.get("last_reset") == last_reset
    assert state.attributes.get("last_valid_state") == "None"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR

    state = hass.states.get("sensor.energy_bill_midpeak")
    assert state.state == "5"
    assert state.attributes.get("last_valid_state") == "None"

    state = hass.states.get("sensor.energy_bill_offpeak")
    assert state.state == "6"
    state = hass.states.get("sensor.energy_bill_tariff0")
    assert state.state == "0.2"
    assert state.attributes.get("status") == COLLECTING
    assert state.attributes.get("last_reset") == last_reset
    assert state.attributes.get("last_reset") == last_reset_2
    assert state.attributes.get("last_valid_state") == "None"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfVolume.GALLONS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER

    state = hass.states.get("sensor.energy_bill_tariff1")
    assert state.state == "1.2"
    assert state.attributes.get("status") == PAUSED
    assert state.attributes.get("last_reset") == last_reset_2
    assert state.attributes.get("last_valid_state") == "None"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR
    assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY

    state = hass.states.get("sensor.energy_bill_superpeak")
    state = hass.states.get("sensor.energy_bill_tariff2")
    assert state.state == "2.1"
    assert state.attributes.get("status") == PAUSED
    assert state.attributes.get("last_reset") == last_reset_1
    assert state.attributes.get("last_valid_state") == "None"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR
    assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY

    state = hass.states.get("sensor.energy_bill_tariff3")
    assert state.state == "3.1"
    assert state.attributes.get("status") == COLLECTING
    assert state.attributes.get("last_reset") == last_reset_1
    assert state.attributes.get("last_valid_state") == "None"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR
    assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY

    state = hass.states.get("sensor.energy_bill_tariff4")
    assert state.state == STATE_UNKNOWN

    # utility_meter is loaded, now set sensors according to utility_meter:

    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)

    await hass.async_block_till_done()

    state = hass.states.get("select.energy_bill")
    assert state.state == "onpeak"
    assert state.state == "tariff0"

    state = hass.states.get("sensor.energy_bill_onpeak")
    state = hass.states.get("sensor.energy_bill_tariff0")
    assert state.attributes.get("status") == COLLECTING

    state = hass.states.get("sensor.energy_bill_offpeak")
    assert state.attributes.get("status") == PAUSED
    for entity_id in (
        "sensor.energy_bill_tariff1",
        "sensor.energy_bill_tariff2",
        "sensor.energy_bill_tariff3",
        "sensor.energy_bill_tariff4",
    ):
        state = hass.states.get(entity_id)
        assert state.attributes.get("status") == PAUSED

@pytest.mark.parametrize(
@@ -672,6 +672,31 @@ async def test_delay_basic(hass: HomeAssistant) -> None:
    )

async def test_empty_delay(hass: HomeAssistant) -> None:
    """Test an empty delay."""
    delay_alias = "delay step"
    sequence = cv.SCRIPT_SCHEMA({"delay": {"seconds": 0}, "alias": delay_alias})
    script_obj = script.Script(hass, sequence, "Test Name", "test_domain")
    delay_started_flag = async_watch_for_action(script_obj, delay_alias)

    try:
        await script_obj.async_run(context=Context())
        await asyncio.wait_for(delay_started_flag.wait(), 1)
    except (AssertionError, TimeoutError):
        await script_obj.async_stop()
        raise
    else:
        await hass.async_block_till_done()
        assert not script_obj.is_running
        assert script_obj.last_action is None

    assert_action_trace(
        {
            "0": [{"result": {"delay": 0.0, "done": True}}],
        }
    )

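test_empty_delay above pins down the semantics of a zero-second delay step: it completes in the same scheduling pass rather than blocking the run, and the trace records {"delay": 0.0, "done": True}. Plain asyncio shows the analogous behavior; a sketch independent of Home Assistant's scheduler:

import asyncio

async def main() -> None:
    # A zero-second sleep yields to the event loop once, then resumes
    # immediately -- analogous to {"delay": {"seconds": 0}} finishing
    # without the script ever reporting itself as running.
    await asyncio.sleep(0)

asyncio.run(main())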
async def test_multiple_runs_delay(hass: HomeAssistant) -> None:
    """Test multiple runs with delay in script."""
    event = "test_event"