forked from home-assistant/core
Compare commits
23 Commits
2023.2.0b1...2023.2.0b3
| Author | SHA1 | Date |
|---|---|---|
| | 69ed30f743 | |
| | d33373f6ee | |
| | bedf5fe6cd | |
| | 29eb7e8f9e | |
| | 60b96f19b7 | |
| | 0a6ce35e30 | |
| | 6397cc5d04 | |
| | b7311dc655 | |
| | e20c7491c1 | |
| | 8cbefd5f97 | |
| | c7665b479a | |
| | 4f2966674a | |
| | b464179eac | |
| | cd59705c4b | |
| | 77bd23899f | |
| | d211603ba7 | |
| | 1dc3bb6eb1 | |
| | 22afc7c7fb | |
| | ba82f13821 | |
| | 41add96bab | |
| | c8c3f4bef6 | |
| | 8cb8ecdae9 | |
| | bd1371680f | |
@@ -128,7 +128,6 @@ SENSOR_DESCRIPTIONS = (
         key=TYPE_AQI_PM25_24H,
         name="AQI PM2.5 24h avg",
         device_class=SensorDeviceClass.AQI,
-        state_class=SensorStateClass.TOTAL_INCREASING,
     ),
     SensorEntityDescription(
         key=TYPE_AQI_PM25_IN,
@@ -140,7 +139,6 @@ SENSOR_DESCRIPTIONS = (
         key=TYPE_AQI_PM25_IN_24H,
         name="AQI PM2.5 indoor 24h avg",
         device_class=SensorDeviceClass.AQI,
-        state_class=SensorStateClass.TOTAL_INCREASING,
     ),
     SensorEntityDescription(
         key=TYPE_BAROMABSIN,
@@ -182,7 +180,7 @@ SENSOR_DESCRIPTIONS = (
         name="Event rain",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.MEASUREMENT,
+        state_class=SensorStateClass.TOTAL,
     ),
     SensorEntityDescription(
         key=TYPE_FEELSLIKE,
@@ -287,7 +285,6 @@ SENSOR_DESCRIPTIONS = (
         name="Last rain",
         icon="mdi:water",
         device_class=SensorDeviceClass.TIMESTAMP,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=TYPE_LIGHTNING_PER_DAY,
@@ -315,7 +312,7 @@ SENSOR_DESCRIPTIONS = (
         name="Monthly rain",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.MEASUREMENT,
+        state_class=SensorStateClass.TOTAL,
     ),
     SensorEntityDescription(
         key=TYPE_PM25_24H,
@@ -586,7 +583,7 @@ SENSOR_DESCRIPTIONS = (
         name="Lifetime rain",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.MEASUREMENT,
+        state_class=SensorStateClass.TOTAL_INCREASING,
     ),
     SensorEntityDescription(
         key=TYPE_UV,
@@ -599,7 +596,7 @@ SENSOR_DESCRIPTIONS = (
         name="Weekly rain",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.MEASUREMENT,
+        state_class=SensorStateClass.TOTAL,
     ),
     SensorEntityDescription(
         key=TYPE_WINDDIR,
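For context on the state_class semantics these hunks rely on, a minimal sketch (not taken from the diff; the key name is hypothetical): rain amounts that accumulate over a period and then reset are TOTAL, lifetime accumulations that never reset are TOTAL_INCREASING, and AQI values carry no state_class at all.

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)

WEEKLY_RAIN = SensorEntityDescription(
    key="weeklyrainin",  # hypothetical key, for illustration only
    name="Weekly rain",
    device_class=SensorDeviceClass.PRECIPITATION,
    # Resets every week, so it is a resettable TOTAL, not a MEASUREMENT.
    state_class=SensorStateClass.TOTAL,
)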
@@ -209,6 +209,20 @@ class BluetoothManager:
             self._bluetooth_adapters, self.storage
         )
         self.async_setup_unavailable_tracking()
+        seen: set[str] = set()
+        for address, service_info in itertools.chain(
+            self._connectable_history.items(), self._all_history.items()
+        ):
+            if address in seen:
+                continue
+            seen.add(address)
+            for domain in self._integration_matcher.match_domains(service_info):
+                discovery_flow.async_create_flow(
+                    self.hass,
+                    domain,
+                    {"source": config_entries.SOURCE_BLUETOOTH},
+                    service_info,
+                )

    @hass_callback
    def async_stop(self, event: Event) -> None:
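The dedupe pattern above in isolation (a runnable sketch with toy data, not from the diff): chain() walks the connectable history first, so when an address appears in both dicts the connectable service_info wins and the later occurrence is skipped.

import itertools

connectable = {"AA:BB": "info-connectable"}
all_history = {"AA:BB": "info-any", "CC:DD": "info-any"}

seen: set[str] = set()
for address, service_info in itertools.chain(
    connectable.items(), all_history.items()
):
    if address in seen:
        continue  # already handled via the connectable history
    seen.add(address)
    print(address, service_info)  # AA:BB info-connectable, then CC:DD info-any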
@@ -17,6 +17,10 @@ from .device_trigger import (
     CONF_BUTTON_2,
     CONF_BUTTON_3,
     CONF_BUTTON_4,
+    CONF_BUTTON_5,
+    CONF_BUTTON_6,
+    CONF_BUTTON_7,
+    CONF_BUTTON_8,
     CONF_CLOSE,
     CONF_DIM_DOWN,
     CONF_DIM_UP,
@@ -95,6 +99,10 @@ INTERFACES = {
     CONF_BUTTON_2: "Button 2",
     CONF_BUTTON_3: "Button 3",
     CONF_BUTTON_4: "Button 4",
+    CONF_BUTTON_5: "Button 5",
+    CONF_BUTTON_6: "Button 6",
+    CONF_BUTTON_7: "Button 7",
+    CONF_BUTTON_8: "Button 8",
     CONF_SIDE_1: "Side 1",
     CONF_SIDE_2: "Side 2",
     CONF_SIDE_3: "Side 3",
@@ -23,7 +23,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         entry.data[CONF_USERNAME],
         entry.data[CONF_USE_LEGACY_PROTOCOL],
     )
-    if not smartplug.authenticated and entry.data[CONF_USE_LEGACY_PROTOCOL]:
+    if not smartplug.authenticated and smartplug.use_legacy_protocol:
        raise ConfigEntryNotReady("Cannot connect/authenticate")

    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = SmartPlugData(smartplug)
@@ -131,6 +131,6 @@ class DLinkFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         except Exception as ex:  # pylint: disable=broad-except
             _LOGGER.exception("Unexpected exception: %s", ex)
             return "unknown"
-        if smartplug.authenticated:
-            return None
-        return "cannot_connect"
+        if not smartplug.authenticated and smartplug.use_legacy_protocol:
+            return "cannot_connect"
+        return None
@@ -209,7 +209,6 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
     DSMRReaderSensorEntityDescription(
         key="dsmr/consumption/gas/currently_delivered",
         name="Current gas usage",
-        device_class=SensorDeviceClass.GAS,
         native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
         state_class=SensorStateClass.MEASUREMENT,
     ),
@@ -153,6 +153,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
         if self._device_info.uses_password:
             return await self.async_step_authenticate()

+        self._password = ""
         return self._async_get_entry()

     async def async_step_discovery_confirm(
@@ -72,15 +72,13 @@ class ESPHomeUpdateEntity(CoordinatorEntity[ESPHomeDashboard], UpdateEntity):
     _attr_title = "ESPHome"
     _attr_name = "Firmware"

-    _device_info: ESPHomeDeviceInfo
-
     def __init__(
         self, entry_data: RuntimeEntryData, coordinator: ESPHomeDashboard
     ) -> None:
         """Initialize the update entity."""
         super().__init__(coordinator=coordinator)
         assert entry_data.device_info is not None
-        self._device_info = entry_data.device_info
+        self._entry_data = entry_data
         self._attr_unique_id = entry_data.device_info.mac_address
         self._attr_device_info = DeviceInfo(
             connections={
@@ -88,6 +86,12 @@ class ESPHomeUpdateEntity(CoordinatorEntity[ESPHomeDashboard], UpdateEntity):
             }
         )

+    @property
+    def _device_info(self) -> ESPHomeDeviceInfo:
+        """Return the device info."""
+        assert self._entry_data.device_info is not None
+        return self._entry_data.device_info
+
     @property
     def available(self) -> bool:
         """Return if update is available."""
@@ -313,7 +313,11 @@ def _generate_device_info(node: Node) -> DeviceInfo:
         model += f" ({node.type})"

     # Get extra information for Z-Wave Devices
-    if node.protocol == PROTO_ZWAVE and node.zwave_props.mfr_id != "0":
+    if (
+        node.protocol == PROTO_ZWAVE
+        and node.zwave_props
+        and node.zwave_props.mfr_id != "0"
+    ):
         device_info[
             ATTR_MANUFACTURER
         ] = f"Z-Wave MfrID:{int(node.zwave_props.mfr_id):#0{6}x}"
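What that formatting produces, and why the extra `node.zwave_props` check matters (a sketch with a hypothetical manufacturer id; previously, attribute access on a None zwave_props raised):

mfr_id = "134"  # hypothetical decimal manufacturer id string
# '#' adds the 0x prefix, '0' zero-pads, 6 is the total field width.
print(f"Z-Wave MfrID:{int(mfr_id):#0{6}x}")  # -> Z-Wave MfrID:0x0086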
@@ -3,7 +3,7 @@
   "name": "Keenetic NDMS2 Router",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/keenetic_ndms2",
-  "requirements": ["ndms2_client==0.1.1"],
+  "requirements": ["ndms2_client==0.1.2"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:InternetGatewayDevice:1",
@@ -3,7 +3,7 @@
   "name": "Matter (BETA)",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/matter",
-  "requirements": ["python-matter-server==2.0.1"],
+  "requirements": ["python-matter-server==2.0.2"],
   "dependencies": ["websocket_api"],
   "codeowners": ["@home-assistant/matter"],
   "iot_class": "local_push"
@@ -13,7 +13,7 @@ from homeassistant import exceptions
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import entity_registry as er
+from homeassistant.helpers import device_registry, entity_registry
 from homeassistant.helpers.update_coordinator import (
     CoordinatorEntity,
     DataUpdateCoordinator,
@@ -41,37 +41,6 @@ def _get_bridge_devices(bridge: NukiBridge) -> tuple[list[NukiLock], list[NukiOpener]]:
     return bridge.locks, bridge.openers


-def _update_devices(devices: list[NukiDevice]) -> dict[str, set[str]]:
-    """
-    Update the Nuki devices.
-
-    Returns:
-        A dict with the events to be fired. The event type is the key and the device ids are the value
-    """
-
-    events: dict[str, set[str]] = defaultdict(set)
-
-    for device in devices:
-        for level in (False, True):
-            try:
-                if isinstance(device, NukiOpener):
-                    last_ring_action_state = device.ring_action_state
-
-                    device.update(level)
-
-                    if not last_ring_action_state and device.ring_action_state:
-                        events["ring"].add(device.nuki_id)
-                else:
-                    device.update(level)
-            except RequestException:
-                continue
-
-            if device.state not in ERROR_STATES:
-                break
-
-    return events
-
-
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up the Nuki entry."""

@@ -101,42 +70,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     except RequestException as err:
         raise exceptions.ConfigEntryNotReady from err

-    async def async_update_data() -> None:
-        """Fetch data from Nuki bridge."""
-        try:
-            # Note: asyncio.TimeoutError and aiohttp.ClientError are already
-            # handled by the data update coordinator.
-            async with async_timeout.timeout(10):
-                events = await hass.async_add_executor_job(
-                    _update_devices, locks + openers
-                )
-        except InvalidCredentialsException as err:
-            raise UpdateFailed(f"Invalid credentials for Bridge: {err}") from err
-        except RequestException as err:
-            raise UpdateFailed(f"Error communicating with Bridge: {err}") from err
-
-        ent_reg = er.async_get(hass)
-        for event, device_ids in events.items():
-            for device_id in device_ids:
-                entity_id = ent_reg.async_get_entity_id(
-                    Platform.LOCK, DOMAIN, device_id
-                )
-                event_data = {
-                    "entity_id": entity_id,
-                    "type": event,
-                }
-                hass.bus.async_fire("nuki_event", event_data)
-
-    coordinator = DataUpdateCoordinator(
-        hass,
-        _LOGGER,
-        # Name of the data. For logging purposes.
-        name="nuki devices",
-        update_method=async_update_data,
-        # Polling interval. Will only be polled if there are subscribers.
-        update_interval=UPDATE_INTERVAL,
-    )
+    # Device registration for the bridge
+    info = bridge.info()
+    bridge_id = parse_id(info["ids"]["hardwareId"])
+    dev_reg = device_registry.async_get(hass)
+    dev_reg.async_get_or_create(
+        config_entry_id=entry.entry_id,
+        identifiers={(DOMAIN, bridge_id)},
+        manufacturer="Nuki Home Solutions GmbH",
+        name=f"Nuki Bridge {bridge_id}",
+        model="Hardware Bridge",
+        sw_version=info["versions"]["firmwareVersion"],
+    )
+
+    coordinator = NukiCoordinator(hass, bridge, locks, openers)

     hass.data[DOMAIN][entry.entry_id] = {
         DATA_COORDINATOR: coordinator,
         DATA_BRIDGE: bridge,
@@ -178,3 +126,94 @@ class NukiEntity(CoordinatorEntity[DataUpdateCoordinator[None]]):
         """Pass coordinator to CoordinatorEntity."""
         super().__init__(coordinator)
         self._nuki_device = nuki_device
+
+    @property
+    def device_info(self):
+        """Device info for Nuki entities."""
+        return {
+            "identifiers": {(DOMAIN, parse_id(self._nuki_device.nuki_id))},
+            "name": self._nuki_device.name,
+            "manufacturer": "Nuki Home Solutions GmbH",
+            "model": self._nuki_device.device_type_str.capitalize(),
+            "sw_version": self._nuki_device.firmware_version,
+            "via_device": (DOMAIN, self.coordinator.bridge_id),
+        }
+
+
+class NukiCoordinator(DataUpdateCoordinator):
+    """Data Update Coordinator for the Nuki integration."""
+
+    def __init__(self, hass, bridge, locks, openers):
+        """Initialize my coordinator."""
+        super().__init__(
+            hass,
+            _LOGGER,
+            # Name of the data. For logging purposes.
+            name="nuki devices",
+            # Polling interval. Will only be polled if there are subscribers.
+            update_interval=UPDATE_INTERVAL,
+        )
+        self.bridge = bridge
+        self.locks = locks
+        self.openers = openers
+
+    @property
+    def bridge_id(self):
+        """Return the parsed id of the Nuki bridge."""
+        return parse_id(self.bridge.info()["ids"]["hardwareId"])
+
+    async def _async_update_data(self) -> None:
+        """Fetch data from Nuki bridge."""
+        try:
+            # Note: asyncio.TimeoutError and aiohttp.ClientError are already
+            # handled by the data update coordinator.
+            async with async_timeout.timeout(10):
+                events = await self.hass.async_add_executor_job(
+                    self.update_devices, self.locks + self.openers
+                )
+        except InvalidCredentialsException as err:
+            raise UpdateFailed(f"Invalid credentials for Bridge: {err}") from err
+        except RequestException as err:
+            raise UpdateFailed(f"Error communicating with Bridge: {err}") from err
+
+        ent_reg = entity_registry.async_get(self.hass)
+        for event, device_ids in events.items():
+            for device_id in device_ids:
+                entity_id = ent_reg.async_get_entity_id(
+                    Platform.LOCK, DOMAIN, device_id
+                )
+                event_data = {
+                    "entity_id": entity_id,
+                    "type": event,
+                }
+                self.hass.bus.async_fire("nuki_event", event_data)
+
+    def update_devices(self, devices: list[NukiDevice]) -> dict[str, set[str]]:
+        """
+        Update the Nuki devices.
+
+        Returns:
+            A dict with the events to be fired. The event type is the key and the device ids are the value
+        """
+
+        events: dict[str, set[str]] = defaultdict(set)
+
+        for device in devices:
+            for level in (False, True):
+                try:
+                    if isinstance(device, NukiOpener):
+                        last_ring_action_state = device.ring_action_state
+
+                        device.update(level)
+
+                        if not last_ring_action_state and device.ring_action_state:
+                            events["ring"].add(device.nuki_id)
+                    else:
+                        device.update(level)
+                except RequestException:
+                    continue
+
+                if device.state not in ERROR_STATES:
+                    break
+
+        return events
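How a platform would consume the new coordinator (a sketch under assumed wiring; DATA_LOCKS is assumed to exist alongside DATA_COORDINATOR and DATA_BRIDGE in the integration's const module):

data = hass.data[DOMAIN][entry.entry_id]
coordinator = data[DATA_COORDINATOR]
# Each entity gets the shared coordinator plus its own pynuki device; the
# entity's via_device link then resolves through coordinator.bridge_id.
entities = [NukiLockEntity(coordinator, lock) for lock in data[DATA_LOCKS]]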
@@ -34,13 +34,10 @@ async def async_setup_entry(
 class NukiDoorsensorEntity(NukiEntity, BinarySensorEntity):
     """Representation of a Nuki Lock Doorsensor."""

+    _attr_has_entity_name = True
+    _attr_name = "Door sensor"
     _attr_device_class = BinarySensorDeviceClass.DOOR

-    @property
-    def name(self):
-        """Return the name of the lock."""
-        return self._nuki_device.name
-
     @property
     def unique_id(self) -> str:
         """Return a unique ID."""
@@ -67,13 +67,9 @@ async def async_setup_entry(
 class NukiDeviceEntity(NukiEntity, LockEntity, ABC):
     """Representation of a Nuki device."""

+    _attr_has_entity_name = True
     _attr_supported_features = LockEntityFeature.OPEN

-    @property
-    def name(self) -> str | None:
-        """Return the name of the lock."""
-        return self._nuki_device.name
-
     @property
     def unique_id(self) -> str | None:
         """Return a unique ID."""
@@ -73,6 +73,7 @@ class OpenAIAgent(conversation.AbstractConversationAgent):
         try:
             prompt = self._async_generate_prompt()
         except TemplateError as err:
             _LOGGER.error("Error rendering prompt: %s", err)
+            intent_response = intent.IntentResponse(language=user_input.language)
             intent_response.async_set_error(
                 intent.IntentResponseErrorCode.UNKNOWN,
@@ -7,22 +7,22 @@ DEFAULT_PROMPT = """This smart home is controlled by Home Assistant.

 An overview of the areas and the devices in this smart home:
 {%- for area in areas %}
-{%- set area_info = namespace(printed=false) %}
-{%- for device in area_devices(area.name) -%}
-{%- if not device_attr(device, "disabled_by") and not device_attr(device, "entry_type") %}
-{%- if not area_info.printed %}
+  {%- set area_info = namespace(printed=false) %}
+  {%- for device in area_devices(area.name) -%}
+    {%- if not device_attr(device, "disabled_by") and not device_attr(device, "entry_type") %}
+      {%- if not area_info.printed %}

 {{ area.name }}:
-{%- set area_info.printed = true %}
-{%- endif %}
-- {{ device_attr(device, "name") }}{% if device_attr(device, "model") not in device_attr(device, "name") %} ({{ device_attr(device, "model") }}){% endif %}
-{%- endif %}
-{%- endfor %}
+        {%- set area_info.printed = true %}
+      {%- endif %}
+- {{ device_attr(device, "name") }}{% if device_attr(device, "model") and device_attr(device, "model") not in device_attr(device, "name") %} ({{ device_attr(device, "model") }}){% endif %}
+    {%- endif %}
+  {%- endfor %}
 {%- endfor %}

 Answer the users questions about the world truthfully.

-If the user wants to control a device, reject the request and suggest using the Home Assistant UI.
+If the user wants to control a device, reject the request and suggest using the Home Assistant app.

 Now finish this conversation:
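Why the added `device_attr(device, "model") and` guard (a sketch using plain jinja2 rather than Home Assistant's template engine): `x not in y` raises when x is None, which previously aborted prompt rendering for any device without a model.

from jinja2 import Template

tmpl = Template("{% if m and m not in n %}({{ m }}){% endif %}")
print(tmpl.render(m=None, n="Test Device"))  # "" instead of a TypeError
print(tmpl.render(m="3A", n="Test Device"))  # "(3A)"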
@@ -2,7 +2,7 @@
   "domain": "plugwise",
   "name": "Plugwise",
   "documentation": "https://www.home-assistant.io/integrations/plugwise",
-  "requirements": ["plugwise==0.27.1"],
+  "requirements": ["plugwise==0.27.4"],
   "codeowners": ["@CoMPaTech", "@bouwew", "@brefra", "@frenck"],
   "zeroconf": ["_plugwise._tcp.local."],
   "config_flow": true,
@@ -836,7 +836,9 @@ class Recorder(threading.Thread):
             return

         try:
-            shared_data_bytes = EventData.shared_data_bytes_from_event(event)
+            shared_data_bytes = EventData.shared_data_bytes_from_event(
+                event, self.dialect_name
+            )
         except JSON_ENCODE_EXCEPTIONS as ex:
             _LOGGER.warning("Event is not JSON serializable: %s: %s", event, ex)
             return
@@ -869,7 +871,7 @@ class Recorder(threading.Thread):
         try:
             dbstate = States.from_event(event)
             shared_attrs_bytes = StateAttributes.shared_attrs_bytes_from_event(
-                event, self._exclude_attributes_by_domain
+                event, self._exclude_attributes_by_domain, self.dialect_name
             )
         except JSON_ENCODE_EXCEPTIONS as ex:
             _LOGGER.warning(
@@ -1024,7 +1026,9 @@ class Recorder(threading.Thread):

     def _post_schema_migration(self, old_version: int, new_version: int) -> None:
         """Run post schema migration tasks."""
-        migration.post_schema_migration(self.event_session, old_version, new_version)
+        migration.post_schema_migration(
+            self.engine, self.event_session, old_version, new_version
+        )

     def _send_keep_alive(self) -> None:
         """Send a keep alive to keep the db connection open."""
@@ -43,11 +43,12 @@ from homeassistant.helpers.json import (
     JSON_DECODE_EXCEPTIONS,
     JSON_DUMP,
     json_bytes,
+    json_bytes_strip_null,
     json_loads,
 )
 import homeassistant.util.dt as dt_util

-from .const import ALL_DOMAIN_EXCLUDE_ATTRS
+from .const import ALL_DOMAIN_EXCLUDE_ATTRS, SupportedDialect
 from .models import StatisticData, StatisticMetaData, process_timestamp

 # SQLAlchemy Schema
@@ -251,8 +252,12 @@ class EventData(Base):  # type: ignore[misc,valid-type]
     )

     @staticmethod
-    def shared_data_bytes_from_event(event: Event) -> bytes:
+    def shared_data_bytes_from_event(
+        event: Event, dialect: SupportedDialect | None
+    ) -> bytes:
         """Create shared_data from an event."""
+        if dialect == SupportedDialect.POSTGRESQL:
+            return json_bytes_strip_null(event.data)
         return json_bytes(event.data)

     @staticmethod
@@ -416,7 +421,9 @@ class StateAttributes(Base):  # type: ignore[misc,valid-type]

     @staticmethod
     def shared_attrs_bytes_from_event(
-        event: Event, exclude_attrs_by_domain: dict[str, set[str]]
+        event: Event,
+        exclude_attrs_by_domain: dict[str, set[str]],
+        dialect: SupportedDialect | None,
     ) -> bytes:
         """Create shared_attrs from a state_changed event."""
         state: State | None = event.data.get("new_state")
@@ -427,6 +434,10 @@ class StateAttributes(Base):  # type: ignore[misc,valid-type]
         exclude_attrs = (
             exclude_attrs_by_domain.get(domain, set()) | ALL_DOMAIN_EXCLUDE_ATTRS
         )
+        if dialect == SupportedDialect.POSTGRESQL:
+            return json_bytes_strip_null(
+                {k: v for k, v in state.attributes.items() if k not in exclude_attrs}
+            )
         return json_bytes(
             {k: v for k, v in state.attributes.items() if k not in exclude_attrs}
         )
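The dialect switch reduced to one place (a sketch of the logic added above; the helper name is hypothetical): PostgreSQL TEXT columns cannot store NUL (0x00) bytes, so only that dialect gets the stripped encoding, while SQLite and MySQL keep the escaped character.

from homeassistant.components.recorder.const import SupportedDialect
from homeassistant.helpers.json import json_bytes, json_bytes_strip_null

def encode_for(dialect, data):  # hypothetical helper, for illustration
    if dialect == SupportedDialect.POSTGRESQL:
        return json_bytes_strip_null(data)
    return json_bytes(data)

assert encode_for(SupportedDialect.POSTGRESQL, {"x": "a\0b"}) == b'{"x":"a"}'
assert encode_for(SupportedDialect.SQLITE, {"x": "a\0b"}) == b'{"x":"a\\u0000b"}'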
@@ -10,7 +10,7 @@ from typing import TYPE_CHECKING

 import sqlalchemy
 from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text
-from sqlalchemy.engine import Engine
+from sqlalchemy.engine import CursorResult, Engine
 from sqlalchemy.exc import (
     DatabaseError,
     InternalError,
@@ -43,7 +43,7 @@ from .statistics import (
     get_start_time,
     validate_db_schema as statistics_validate_db_schema,
 )
-from .tasks import PostSchemaMigrationTask
+from .tasks import CommitTask, PostSchemaMigrationTask
 from .util import session_scope

 if TYPE_CHECKING:
@@ -166,6 +166,9 @@ def migrate_schema(

     if current_version != SCHEMA_VERSION:
         instance.queue_task(PostSchemaMigrationTask(current_version, SCHEMA_VERSION))
+        # Make sure the post schema migration task is committed in case
+        # the next task does not have commit_before = True
+        instance.queue_task(CommitTask())


 def _create_index(
@@ -846,8 +849,7 @@ def _apply_update(  # noqa: C901
         _create_index(session_maker, "events", "ix_events_event_type_time_fired_ts")
         _create_index(session_maker, "states", "ix_states_entity_id_last_updated_ts")
         _create_index(session_maker, "states", "ix_states_last_updated_ts")
-        with session_scope(session=session_maker()) as session:
-            _migrate_columns_to_timestamp(hass, session, engine)
+        _migrate_columns_to_timestamp(session_maker, engine)
    elif new_version == 32:
         # Migration is done in two steps to ensure we can start using
         # the new columns before we wipe the old ones.
@@ -860,6 +862,7 @@ def _apply_update(  # noqa: C901


 def post_schema_migration(
+    engine: Engine,
     session: Session,
     old_version: int,
     new_version: int,
@@ -878,62 +881,142 @@ def post_schema_migration(
     # In version 31 we migrated all the time_fired, last_updated, and last_changed
     # columns to be timestamps. In version 32 we need to wipe the old columns
     # since they are no longer used and take up a significant amount of space.
-        _wipe_old_string_time_columns(session)
+        _wipe_old_string_time_columns(engine, session)


-def _wipe_old_string_time_columns(session: Session) -> None:
+def _wipe_old_string_time_columns(engine: Engine, session: Session) -> None:
     """Wipe old string time columns to save space."""
     # Wipe Events.time_fired since its been replaced by Events.time_fired_ts
     # Wipe States.last_updated since its been replaced by States.last_updated_ts
     # Wipe States.last_changed since its been replaced by States.last_changed_ts
-    session.execute(text("UPDATE events set time_fired=NULL;"))
-    session.execute(text("UPDATE states set last_updated=NULL, last_changed=NULL;"))
-    session.commit()
+    #
+    if engine.dialect.name == SupportedDialect.SQLITE:
+        session.execute(text("UPDATE events set time_fired=NULL;"))
+        session.commit()
+        session.execute(text("UPDATE states set last_updated=NULL, last_changed=NULL;"))
+        session.commit()
+    elif engine.dialect.name == SupportedDialect.MYSQL:
+        #
+        # Since this is only to save space we limit the number of rows we update
+        # to 10,000,000 per table since we do not want to block the database for too long
+        # or run out of innodb_buffer_pool_size on MySQL. The old data will eventually
+        # be cleaned up by the recorder purge if we do not do it now.
+        #
+        session.execute(text("UPDATE events set time_fired=NULL LIMIT 10000000;"))
+        session.commit()
+        session.execute(
+            text(
+                "UPDATE states set last_updated=NULL, last_changed=NULL "
+                " LIMIT 10000000;"
+            )
+        )
+        session.commit()
+    elif engine.dialect.name == SupportedDialect.POSTGRESQL:
+        #
+        # Since this is only to save space we limit the number of rows we update
+        # to 250,000 per table since we do not want to block the database for too long
+        # or run out ram with postgresql. The old data will eventually
+        # be cleaned up by the recorder purge if we do not do it now.
+        #
+        session.execute(
+            text(
+                "UPDATE events set time_fired=NULL "
+                "where event_id in "
+                "(select event_id from events where time_fired_ts is NOT NULL LIMIT 250000);"
+            )
+        )
+        session.commit()
+        session.execute(
+            text(
+                "UPDATE states set last_updated=NULL, last_changed=NULL "
+                "where state_id in "
+                "(select state_id from states where last_updated_ts is NOT NULL LIMIT 250000);"
+            )
+        )
+        session.commit()


 def _migrate_columns_to_timestamp(
-    hass: HomeAssistant, session: Session, engine: Engine
+    session_maker: Callable[[], Session], engine: Engine
 ) -> None:
     """Migrate columns to use timestamp."""
     # Migrate all data in Events.time_fired to Events.time_fired_ts
     # Migrate all data in States.last_updated to States.last_updated_ts
     # Migrate all data in States.last_changed to States.last_changed_ts
-    connection = session.connection()
+    result: CursorResult | None = None
     if engine.dialect.name == SupportedDialect.SQLITE:
-        connection.execute(
-            text(
-                'UPDATE events set time_fired_ts=strftime("%s",time_fired) + '
-                "cast(substr(time_fired,-7) AS FLOAT);"
-            )
-        )
-        connection.execute(
-            text(
-                'UPDATE states set last_updated_ts=strftime("%s",last_updated) + '
-                "cast(substr(last_updated,-7) AS FLOAT), "
-                'last_changed_ts=strftime("%s",last_changed) + '
-                "cast(substr(last_changed,-7) AS FLOAT);"
-            )
-        )
+        # With SQLite we do this in one go since it is faster
+        with session_scope(session=session_maker()) as session:
+            connection = session.connection()
+            connection.execute(
+                text(
+                    'UPDATE events set time_fired_ts=strftime("%s",time_fired) + '
+                    "cast(substr(time_fired,-7) AS FLOAT);"
+                )
+            )
+            connection.execute(
+                text(
+                    'UPDATE states set last_updated_ts=strftime("%s",last_updated) + '
+                    "cast(substr(last_updated,-7) AS FLOAT), "
+                    'last_changed_ts=strftime("%s",last_changed) + '
+                    "cast(substr(last_changed,-7) AS FLOAT);"
+                )
+            )
    elif engine.dialect.name == SupportedDialect.MYSQL:
-        connection.execute(
-            text("UPDATE events set time_fired_ts=UNIX_TIMESTAMP(time_fired);")
-        )
-        connection.execute(
-            text(
-                "UPDATE states set last_updated_ts=UNIX_TIMESTAMP(last_updated), "
-                "last_changed_ts=UNIX_TIMESTAMP(last_changed);"
-            )
-        )
+        # With MySQL we do this in chunks to avoid hitting the `innodb_buffer_pool_size` limit
+        # We also need to do this in a loop since we can't be sure that we have
+        # updated all rows in the table until the rowcount is 0
+        while result is None or result.rowcount > 0:
+            with session_scope(session=session_maker()) as session:
+                result = session.connection().execute(
+                    text(
+                        "UPDATE events set time_fired_ts="
+                        "IF(time_fired is NULL,0,UNIX_TIMESTAMP(time_fired)) "
+                        "where time_fired_ts is NULL "
+                        "LIMIT 250000;"
+                    )
+                )
+        result = None
+        while result is None or result.rowcount > 0:
+            with session_scope(session=session_maker()) as session:
+                result = session.connection().execute(
+                    text(
+                        "UPDATE states set last_updated_ts="
+                        "IF(last_updated is NULL,0,UNIX_TIMESTAMP(last_updated)), "
+                        "last_changed_ts=UNIX_TIMESTAMP(last_changed) "
+                        "where last_updated_ts is NULL "
+                        "LIMIT 250000;"
+                    )
+                )
    elif engine.dialect.name == SupportedDialect.POSTGRESQL:
-        connection.execute(
-            text("UPDATE events set time_fired_ts=EXTRACT(EPOCH FROM time_fired);")
-        )
-        connection.execute(
-            text(
-                "UPDATE states set last_updated_ts=EXTRACT(EPOCH FROM last_updated), "
-                "last_changed_ts=EXTRACT(EPOCH FROM last_changed);"
-            )
-        )
+        # With Postgresql we do this in chunks to avoid using too much memory
+        # We also need to do this in a loop since we can't be sure that we have
+        # updated all rows in the table until the rowcount is 0
+        while result is None or result.rowcount > 0:
+            with session_scope(session=session_maker()) as session:
+                result = session.connection().execute(
+                    text(
+                        "UPDATE events SET "
+                        "time_fired_ts= "
+                        "(case when time_fired is NULL then 0 else EXTRACT(EPOCH FROM time_fired) end) "
+                        "WHERE event_id IN ( "
+                        "SELECT event_id FROM events where time_fired_ts is NULL LIMIT 250000 "
+                        " );"
+                    )
+                )
+        result = None
+        while result is None or result.rowcount > 0:
+            with session_scope(session=session_maker()) as session:
+                result = session.connection().execute(
+                    text(
+                        "UPDATE states set last_updated_ts="
+                        "(case when last_updated is NULL then 0 else EXTRACT(EPOCH FROM last_updated) end), "
+                        "last_changed_ts=EXTRACT(EPOCH FROM last_changed) "
+                        "where state_id IN ( "
+                        "SELECT state_id FROM states where last_updated_ts is NULL LIMIT 250000 "
+                        " );"
+                    )
+                )


 def _initialize_database(session: Session) -> bool:
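The chunking idiom used repeatedly above, reduced to its core (a sketch assuming the same session_scope and session_maker helpers): run a LIMITed UPDATE in a fresh, committed session until rowcount reports that no rows remain, so a large table is never rewritten in a single long transaction.

from sqlalchemy import text
from homeassistant.components.recorder.util import session_scope

def _update_in_chunks(session_maker, statement: str) -> None:
    """Hypothetical helper showing the loop shape, not code from the diff."""
    result = None
    while result is None or result.rowcount > 0:
        # session_scope commits on exit, so each chunk is persisted
        # before the next one starts.
        with session_scope(session=session_maker()) as session:
            result = session.connection().execute(text(statement))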
@@ -513,7 +513,7 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass | None]
     SensorDeviceClass.DATA_SIZE: set(SensorStateClass),
     SensorDeviceClass.DATE: set(),
     SensorDeviceClass.DISTANCE: set(SensorStateClass),
-    SensorDeviceClass.DURATION: set(),
+    SensorDeviceClass.DURATION: set(SensorStateClass),
     SensorDeviceClass.ENERGY: {
         SensorStateClass.TOTAL,
         SensorStateClass.TOTAL_INCREASING,
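What `set(SensorStateClass)` expands to (a sketch): duration sensors may now declare any of the three state classes instead of none at all.

from homeassistant.components.sensor import SensorStateClass

assert set(SensorStateClass) == {
    SensorStateClass.MEASUREMENT,
    SensorStateClass.TOTAL,
    SensorStateClass.TOTAL_INCREASING,
}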
@@ -3,7 +3,7 @@
   "name": "Shelly",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/shelly",
-  "requirements": ["aioshelly==5.3.0"],
+  "requirements": ["aioshelly==5.3.1"],
   "dependencies": ["bluetooth", "http"],
   "zeroconf": [
     {
@@ -17,7 +17,7 @@ button:
      description: >-
        Name of the button to press. Known possible values are
        LEFT, RIGHT, DOWN, UP, HOME, MENU, BACK, ENTER, DASH, INFO, ASTERISK, CC, EXIT,
-        MUTE, RED, GREEN, BLUE, VOLUMEUP, VOLUMEDOWN, CHANNELUP, CHANNELDOWN,
+        MUTE, RED, GREEN, BLUE, YELLOW, VOLUMEUP, VOLUMEDOWN, CHANNELUP, CHANNELDOWN,
        PLAY, PAUSE, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
      required: true
      example: "LEFT"
@@ -224,7 +224,8 @@ class InovelliConfigEntityChannel(ZigbeeChannel):
         "switch_type": False,
         "button_delay": False,
         "smart_bulb_mode": False,
-        "double_tap_up_for_full_brightness": True,
+        "double_tap_up_for_max_brightness": True,
+        "double_tap_down_for_min_brightness": True,
         "led_color_when_on": True,
         "led_color_when_off": True,
         "led_intensity_when_on": True,
@@ -372,14 +372,26 @@ class InovelliSmartBulbMode(ZHASwitchConfigurationEntity, id_suffix="smart_bulb_
     channel_names=CHANNEL_INOVELLI,
 )
 class InovelliDoubleTapForFullBrightness(
-    ZHASwitchConfigurationEntity, id_suffix="double_tap_up_for_full_brightness"
+    ZHASwitchConfigurationEntity, id_suffix="double_tap_up_for_max_brightness"
 ):
     """Inovelli double tap for full brightness control."""

-    _zcl_attribute: str = "double_tap_up_for_full_brightness"
+    _zcl_attribute: str = "double_tap_up_for_max_brightness"
     _attr_name: str = "Double tap full brightness"


+@CONFIG_DIAGNOSTIC_MATCH(
+    channel_names=CHANNEL_INOVELLI,
+)
+class InovelliDoubleTapForMinBrightness(
+    ZHASwitchConfigurationEntity, id_suffix="double_tap_down_for_min_brightness"
+):
+    """Inovelli double tap down for minimum brightness control."""
+
+    _zcl_attribute: str = "double_tap_down_for_min_brightness"
+    _attr_name: str = "Double tap minimum brightness"
+
+
 @CONFIG_DIAGNOSTIC_MATCH(
     channel_names=CHANNEL_INOVELLI,
 )
@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2023
 MINOR_VERSION: Final = 2
-PATCH_VERSION: Final = "0b1"
+PATCH_VERSION: Final = "0b3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)
@@ -71,6 +71,40 @@ def json_bytes(data: Any) -> bytes:
     )


+def json_bytes_strip_null(data: Any) -> bytes:
+    """Dump json bytes after terminating strings at the first NUL."""
+
+    def process_dict(_dict: dict[Any, Any]) -> dict[Any, Any]:
+        """Strip NUL from items in a dict."""
+        return {key: strip_null(o) for key, o in _dict.items()}
+
+    def process_list(_list: list[Any]) -> list[Any]:
+        """Strip NUL from items in a list."""
+        return [strip_null(o) for o in _list]
+
+    def strip_null(obj: Any) -> Any:
+        """Strip NUL from an object."""
+        if isinstance(obj, str):
+            return obj.split("\0", 1)[0]
+        if isinstance(obj, dict):
+            return process_dict(obj)
+        if isinstance(obj, list):
+            return process_list(obj)
+        return obj
+
+    # We expect null-characters to be very rare, hence try encoding first and look
+    # for an escaped null-character in the output.
+    result = json_bytes(data)
+    if b"\\u0000" in result:
+        # We work on the processed result so we don't need to worry about
+        # Home Assistant extensions which allows encoding sets, tuples, etc.
+        data_processed = orjson.loads(result)
+        data_processed = strip_null(data_processed)
+        result = json_bytes(data_processed)
+
+    return result
+
+
 def json_dumps(data: Any) -> str:
     """Dump json string.
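Quick behavior check for json_bytes_strip_null (a sketch; the full tests are in the test_json hunk further down): strings are cut at the first NUL, containers are processed recursively, and inputs without a NUL take the fast path of a single json_bytes() call because orjson escapes NUL as \u0000.

from homeassistant.helpers.json import json_bytes_strip_null

assert json_bytes_strip_null("silly\0stuff") == b'"silly"'
assert json_bytes_strip_null({"k": ["a\0b", "ok"]}) == b'{"k":["a","ok"]}'
assert json_bytes_strip_null("clean") == b'"clean"'  # no second encode pass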
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2023.2.0b1"
+version = "2023.2.0b3"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -267,7 +267,7 @@ aiosenseme==0.6.1
 aiosenz==1.0.0

 # homeassistant.components.shelly
-aioshelly==5.3.0
+aioshelly==5.3.1

 # homeassistant.components.skybell
 aioskybell==22.7.0
@@ -1168,7 +1168,7 @@ mycroftapi==2.0
 nad_receiver==0.3.0

 # homeassistant.components.keenetic_ndms2
-ndms2_client==0.1.1
+ndms2_client==0.1.2

 # homeassistant.components.ness_alarm
 nessclient==0.10.0
@@ -1373,7 +1373,7 @@ plexauth==0.0.6
 plexwebsocket==0.0.13

 # homeassistant.components.plugwise
-plugwise==0.27.1
+plugwise==0.27.4

 # homeassistant.components.plum_lightpad
 plumlightpad==0.0.11
@@ -2072,7 +2072,7 @@ python-kasa==0.5.0
 # python-lirc==1.2.3

 # homeassistant.components.matter
-python-matter-server==2.0.1
+python-matter-server==2.0.2

 # homeassistant.components.xiaomi_miio
 python-miio==0.5.12
@@ -245,7 +245,7 @@ aiosenseme==0.6.1
 aiosenz==1.0.0

 # homeassistant.components.shelly
-aioshelly==5.3.0
+aioshelly==5.3.1

 # homeassistant.components.skybell
 aioskybell==22.7.0
@@ -867,7 +867,7 @@ mutagen==1.46.0
 mutesync==0.0.1

 # homeassistant.components.keenetic_ndms2
-ndms2_client==0.1.1
+ndms2_client==0.1.2

 # homeassistant.components.ness_alarm
 nessclient==0.10.0
@@ -1003,7 +1003,7 @@ plexauth==0.0.6
 plexwebsocket==0.0.13

 # homeassistant.components.plugwise
-plugwise==0.27.1
+plugwise==0.27.4

 # homeassistant.components.plum_lightpad
 plumlightpad==0.0.11
@@ -1468,7 +1468,7 @@ python-juicenet==1.1.0
 python-kasa==0.5.0

 # homeassistant.components.matter
-python-matter-server==2.0.1
+python-matter-server==2.0.2

 # homeassistant.components.xiaomi_miio
 python-miio==0.5.12
@@ -186,3 +186,18 @@ def one_adapter_old_bluez():
         },
     ):
         yield
+
+
+@pytest.fixture(name="disable_new_discovery_flows")
+def disable_new_discovery_flows_fixture():
+    """Fixture that disables new discovery flows.
+
+    We want to disable new discovery flows as we are testing the
+    BluetoothManager and not the discovery flows. This fixture
+    will patch the discovery_flow.async_create_flow method to
+    ensure we do not load other integrations.
+    """
+    with patch(
+        "homeassistant.components.bluetooth.manager.discovery_flow.async_create_flow"
+    ) as mock_create_flow:
+        yield mock_create_flow
@@ -345,7 +345,9 @@ async def test_base_scanner_connecting_behavior(hass, enable_bluetooth):
     unsetup()


-async def test_restore_history_remote_adapter(hass, hass_storage):
+async def test_restore_history_remote_adapter(
+    hass, hass_storage, disable_new_discovery_flows
+):
     """Test we can restore history for a remote adapter."""

     data = hass_storage[storage.REMOTE_SCANNER_STORAGE_KEY] = json_loads(

@@ -282,7 +282,9 @@ async def test_switching_adapters_based_on_stale(
     )


-async def test_restore_history_from_dbus(hass, one_adapter):
+async def test_restore_history_from_dbus(
+    hass, one_adapter, disable_new_discovery_flows
+):
     """Test we can restore history from dbus."""
     address = "AA:BB:CC:CC:CC:FF"

@@ -304,7 +306,7 @@ async def test_restore_history_from_dbus(hass, one_adapter):


 async def test_restore_history_from_dbus_and_remote_adapters(
-    hass, one_adapter, hass_storage
+    hass, one_adapter, hass_storage, disable_new_discovery_flows
 ):
     """Test we can restore history from dbus along with remote adapters."""
     address = "AA:BB:CC:CC:CC:FF"
@@ -337,10 +339,11 @@ async def test_restore_history_from_dbus_and_remote_adapters(
     assert (
         bluetooth.async_ble_device_from_address(hass, "EB:0B:36:35:6F:A4") is not None
     )
+    assert disable_new_discovery_flows.call_count > 1


 async def test_restore_history_from_dbus_and_corrupted_remote_adapters(
-    hass, one_adapter, hass_storage
+    hass, one_adapter, hass_storage, disable_new_discovery_flows
 ):
     """Test we can restore history from dbus when the remote adapters data is corrupted."""
     address = "AA:BB:CC:CC:CC:FF"
@@ -371,6 +374,7 @@ async def test_restore_history_from_dbus_and_corrupted_remote_adapters(

     assert bluetooth.async_ble_device_from_address(hass, address) is not None
     assert bluetooth.async_ble_device_from_address(hass, "EB:0B:36:35:6F:A4") is None
+    assert disable_new_discovery_flows.call_count >= 1


 async def test_switching_adapters_based_on_rssi_connectable_to_non_connectable(
@@ -518,6 +518,54 @@ async def test_reauth_fixed_via_dashboard(
     assert len(mock_get_encryption_key.mock_calls) == 1


+async def test_reauth_fixed_via_dashboard_remove_password(
+    hass, mock_client, mock_zeroconf, mock_dashboard
+):
+    """Test reauth fixed automatically via dashboard with password removed."""
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        data={
+            CONF_HOST: "127.0.0.1",
+            CONF_PORT: 6053,
+            CONF_PASSWORD: "hello",
+            CONF_DEVICE_NAME: "test",
+        },
+    )
+    entry.add_to_hass(hass)
+
+    mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test")
+
+    mock_dashboard["configured"].append(
+        {
+            "name": "test",
+            "configuration": "test.yaml",
+        }
+    )
+
+    await dashboard.async_get_dashboard(hass).async_refresh()
+
+    with patch(
+        "homeassistant.components.esphome.dashboard.ESPHomeDashboardAPI.get_encryption_key",
+        return_value=VALID_NOISE_PSK,
+    ) as mock_get_encryption_key:
+        result = await hass.config_entries.flow.async_init(
+            "esphome",
+            context={
+                "source": config_entries.SOURCE_REAUTH,
+                "entry_id": entry.entry_id,
+                "unique_id": entry.unique_id,
+            },
+        )
+
+    assert result["type"] == FlowResultType.ABORT, result
+    assert result["reason"] == "reauth_successful"
+    assert entry.data[CONF_NOISE_PSK] == VALID_NOISE_PSK
+    assert entry.data[CONF_PASSWORD] == ""
+
+    assert len(mock_get_encryption_key.mock_calls) == 1
+
+
 async def test_reauth_confirm_invalid(hass, mock_client, mock_zeroconf):
     """Test reauth initiation with invalid PSK."""
     entry = MockConfigEntry(
@@ -50,9 +50,28 @@ async def test_default_prompt(hass, mock_init_component):
         model="Test Model 3A",
         suggested_area="Test Area 2",
     )
+    device_reg.async_get_or_create(
+        config_entry_id="1234",
+        connections={("test", "qwer")},
+        name="Test Device 4",
+        suggested_area="Test Area 2",
+    )
+    device = device_reg.async_get_or_create(
+        config_entry_id="1234",
+        connections={("test", "9876-disabled")},
+        name="Test Device 3",
+        manufacturer="Test Manufacturer 3",
+        model="Test Model 3A",
+        suggested_area="Test Area 2",
+    )
+    device_reg.async_update_device(
+        device.id, disabled_by=device_registry.DeviceEntryDisabler.USER
+    )

     with patch("openai.Completion.create") as mock_create:
-        await conversation.async_converse(hass, "hello", None, Context())
+        result = await conversation.async_converse(hass, "hello", None, Context())
+
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE

     assert (
         mock_create.mock_calls[0][2]["prompt"]
@@ -66,10 +85,11 @@ Test Area:
 Test Area 2:
 - Test Device 2
 - Test Device 3 (Test Model 3A)
+- Test Device 4

 Answer the users questions about the world truthfully.

-If the user wants to control a device, reject the request and suggest using the Home Assistant UI.
+If the user wants to control a device, reject the request and suggest using the Home Assistant app.

 Now finish this conversation:
@@ -34,6 +34,7 @@ from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.orm import aliased, declarative_base, relationship
 from sqlalchemy.orm.session import Session

+from homeassistant.components.recorder.const import SupportedDialect
 from homeassistant.const import (
     ATTR_ATTRIBUTION,
     ATTR_RESTORED,
@@ -287,7 +288,9 @@ class EventData(Base):  # type: ignore[misc,valid-type]
     )

     @staticmethod
-    def shared_data_bytes_from_event(event: Event) -> bytes:
+    def shared_data_bytes_from_event(
+        event: Event, dialect: SupportedDialect | None
+    ) -> bytes:
         """Create shared_data from an event."""
         return json_bytes(event.data)

@@ -438,7 +441,9 @@ class StateAttributes(Base):  # type: ignore[misc,valid-type]

     @staticmethod
     def shared_attrs_bytes_from_event(
-        event: Event, exclude_attrs_by_domain: dict[str, set[str]]
+        event: Event,
+        exclude_attrs_by_domain: dict[str, set[str]],
+        dialect: SupportedDialect | None,
     ) -> bytes:
         """Create shared_attrs from a state_changed event."""
         state: State | None = event.data.get("new_state")
@@ -31,6 +31,7 @@ from homeassistant.components.recorder.const import (
     EVENT_RECORDER_5MIN_STATISTICS_GENERATED,
     EVENT_RECORDER_HOURLY_STATISTICS_GENERATED,
     KEEPALIVE_TIME,
+    SupportedDialect,
 )
 from homeassistant.components.recorder.db_schema import (
     SCHEMA_VERSION,
@@ -223,6 +224,42 @@ async def test_saving_state(recorder_mock, hass: HomeAssistant):
     assert state == _state_with_context(hass, entity_id)


+@pytest.mark.parametrize(
+    "dialect_name, expected_attributes",
+    (
+        (SupportedDialect.MYSQL, {"test_attr": 5, "test_attr_10": "silly\0stuff"}),
+        (SupportedDialect.POSTGRESQL, {"test_attr": 5, "test_attr_10": "silly"}),
+        (SupportedDialect.SQLITE, {"test_attr": 5, "test_attr_10": "silly\0stuff"}),
+    ),
+)
+async def test_saving_state_with_nul(
+    recorder_mock, hass: HomeAssistant, dialect_name, expected_attributes
+):
+    """Test saving and restoring a state with nul in attributes."""
+    entity_id = "test.recorder"
+    state = "restoring_from_db"
+    attributes = {"test_attr": 5, "test_attr_10": "silly\0stuff"}
+
+    with patch(
+        "homeassistant.components.recorder.core.Recorder.dialect_name", dialect_name
+    ):
+        hass.states.async_set(entity_id, state, attributes)
+        await async_wait_recording_done(hass)
+
+    with session_scope(hass=hass) as session:
+        db_states = []
+        for db_state, db_state_attributes in session.query(States, StateAttributes):
+            db_states.append(db_state)
+            state = db_state.to_native()
+            state.attributes = db_state_attributes.to_native()
+        assert len(db_states) == 1
+        assert db_states[0].event_id is None
+
+    expected = _state_with_context(hass, entity_id)
+    expected.attributes = expected_attributes
+    assert state == expected
+
+
 async def test_saving_many_states(
     async_setup_recorder_instance: SetupRecorderInstanceT, hass: HomeAssistant
 ):
@@ -10,6 +10,7 @@ from homeassistant import core
 from homeassistant.helpers.json import (
     ExtendedJSONEncoder,
     JSONEncoder,
+    json_bytes_strip_null,
     json_dumps,
     json_dumps_sorted,
 )
@@ -118,3 +119,19 @@ def test_json_dumps_rgb_color_subclass():
     rgb = RGBColor(4, 2, 1)

     assert json_dumps(rgb) == "[4,2,1]"
+
+
+def test_json_bytes_strip_null():
+    """Test stripping nul from strings."""
+
+    assert json_bytes_strip_null("\0") == b'""'
+    assert json_bytes_strip_null("silly\0stuff") == b'"silly"'
+    assert json_bytes_strip_null(["one", "two\0", "three"]) == b'["one","two","three"]'
+    assert (
+        json_bytes_strip_null({"k1": "one", "k2": "two\0", "k3": "three"})
+        == b'{"k1":"one","k2":"two","k3":"three"}'
+    )
+    assert (
+        json_bytes_strip_null([[{"k1": {"k2": ["silly\0stuff"]}}]])
+        == b'[[{"k1":{"k2":["silly"]}}]]'
+    )