Compare commits

...

6 Commits

Author SHA1 Message Date
Paulus Schoutsen
fe35fac8ee Bad AI put a section back. 2025-10-03 09:48:43 -04:00
Paulus Schoutsen
4bccc57b46 Add config flow diagrams to Z-Wave JS docs 2025-10-03 09:31:45 -04:00
Shay Levy
404f95b442 Add Shelly support for valve entities (#153348) 2025-10-03 15:04:35 +03:00
cdnninja
89cf784022 Fix VeSync zero fan speed handling (#153493)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-10-03 11:56:02 +02:00
Copilot
02142f352d Fix awair integration AttributeError when update listener accesses runtime_data (#153521)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: balloob <1444314+balloob@users.noreply.github.com>
2025-10-03 05:49:41 -04:00
Stefan Agner
ec3dd7d1e5 Add num open fds sensor to systemmonitor (#152441)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-10-03 10:53:02 +02:00
17 changed files with 1295 additions and 132 deletions

View File

@@ -26,9 +26,6 @@ async def async_setup_entry(
if CONF_HOST in config_entry.data:
coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session)
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
else:
coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session)
@@ -36,6 +33,11 @@ async def async_setup_entry(
config_entry.runtime_data = coordinator
if CONF_HOST in config_entry.data:
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True

View File

@@ -308,3 +308,7 @@ DEVICE_UNIT_MAP = {
MAX_SCRIPT_SIZE = 5120
All_LIGHT_TYPES = ("cct", "light", "rgb", "rgbw")
# Shelly-X specific models
MODEL_NEO_WATER_VALVE = "NeoWaterValve"
MODEL_FRANKEVER_WATER_VALVE = "WaterValve"

View File

@@ -186,6 +186,9 @@ def async_setup_rpc_attribute_entities(
for key in key_instances:
# Filter non-existing sensors
if description.models and coordinator.model not in description.models:
continue
if description.role and description.role != coordinator.device.config[
key
].get("role", "generic"):
@@ -316,6 +319,7 @@ class RpcEntityDescription(EntityDescription):
options_fn: Callable[[dict], list[str]] | None = None
entity_class: Callable | None = None
role: str | None = None
models: set[str] | None = None
@dataclass(frozen=True)

View File

@@ -17,11 +17,15 @@ from homeassistant.components.valve import (
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry
from .const import MODEL_FRANKEVER_WATER_VALVE, MODEL_NEO_WATER_VALVE
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
from .entity import (
BlockEntityDescription,
RpcEntityDescription,
ShellyBlockAttributeEntity,
ShellyRpcAttributeEntity,
async_setup_block_attribute_entities,
async_setup_entry_rpc,
)
from .utils import async_remove_shelly_entity, get_device_entry_gen
@@ -33,6 +37,11 @@ class BlockValveDescription(BlockEntityDescription, ValveEntityDescription):
"""Class to describe a BLOCK valve."""
@dataclass(kw_only=True, frozen=True)
class RpcValveDescription(RpcEntityDescription, ValveEntityDescription):
    """Class to describe an RPC virtual valve."""
GAS_VALVE = BlockValveDescription(
key="valve|valve",
name="Valve",
@@ -41,6 +50,83 @@ GAS_VALVE = BlockValveDescription(
)
class RpcShellyBaseWaterValve(ShellyRpcAttributeEntity, ValveEntity):
    """Base Entity for RPC Shelly Water Valves."""

    entity_description: RpcValveDescription
    _attr_device_class = ValveDeviceClass.WATER
    # Component instance id — presumably assigned by ShellyRpcAttributeEntity
    # during __init__; TODO confirm against the base class.
    _id: int

    def __init__(
        self,
        coordinator: ShellyRpcCoordinator,
        key: str,
        attribute: str,
        description: RpcEntityDescription,
    ) -> None:
        """Initialize RPC water valve."""
        super().__init__(coordinator, key, attribute, description)
        self._attr_name = None  # Main device entity
class RpcShellyWaterValve(RpcShellyBaseWaterValve):
    """Entity that controls a valve on RPC Shelly Water Valve."""

    # Position-capable valve: supports open, close and set-position commands.
    _attr_supported_features = (
        ValveEntityFeature.OPEN
        | ValveEntityFeature.CLOSE
        | ValveEntityFeature.SET_POSITION
    )
    _attr_reports_position = True

    @property
    def current_valve_position(self) -> int:
        """Return current position of valve."""
        return cast(int, self.attribute_value)

    async def async_set_valve_position(self, position: int) -> None:
        """Move the valve to a specific position."""
        # Position is written through the device's "number" component.
        await self.coordinator.device.number_set(self._id, position)
class RpcShellyNeoWaterValve(RpcShellyBaseWaterValve):
    """Entity that controls a valve on RPC Shelly NEO Water Valve."""

    # NEO valve only supports open/close, no intermediate positions.
    _attr_supported_features = ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE
    _attr_reports_position = False

    @property
    def is_closed(self) -> bool | None:
        """Return if the valve is closed or not."""
        # The boolean component value is truthy when the valve is open.
        return not self.attribute_value

    async def async_open_valve(self, **kwargs: Any) -> None:
        """Open valve."""
        await self.coordinator.device.boolean_set(self._id, True)

    async def async_close_valve(self, **kwargs: Any) -> None:
        """Close valve."""
        await self.coordinator.device.boolean_set(self._id, False)
# RPC valve entity descriptions, keyed by a unique description id.
# Each description is restricted to the device models it applies to.
RPC_VALVES: dict[str, RpcValveDescription] = {
    # FrankEver valve: position-based control through a "number" component.
    "water_valve": RpcValveDescription(
        key="number",
        sub_key="value",
        role="position",
        entity_class=RpcShellyWaterValve,
        models={MODEL_FRANKEVER_WATER_VALVE},
    ),
    # NEO valve: open/close only, through a "boolean" component.
    "neo_water_valve": RpcValveDescription(
        key="boolean",
        sub_key="value",
        role="state",
        entity_class=RpcShellyNeoWaterValve,
        models={MODEL_NEO_WATER_VALVE},
    ),
}
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ShellyConfigEntry,
@@ -48,7 +134,24 @@ async def async_setup_entry(
) -> None:
"""Set up valves for device."""
if get_device_entry_gen(config_entry) in BLOCK_GENERATIONS:
async_setup_block_entry(hass, config_entry, async_add_entities)
return async_setup_block_entry(hass, config_entry, async_add_entities)
return async_setup_rpc_entry(hass, config_entry, async_add_entities)
@callback
def async_setup_rpc_entry(
    hass: HomeAssistant,
    config_entry: ShellyConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up entities for RPC device."""
    coordinator = config_entry.runtime_data.rpc
    # RPC generation devices always have an RPC coordinator at this point.
    assert coordinator
    # RpcShellyWaterValve is passed as the default entity class; each
    # description in RPC_VALVES presumably overrides it via entity_class —
    # TODO confirm against async_setup_entry_rpc.
    async_setup_entry_rpc(
        hass, config_entry, async_add_entities, RPC_VALVES, RpcShellyWaterValve
    )
@callback

View File

@@ -50,7 +50,10 @@ async def async_setup_entry(
_LOGGER.debug("disk arguments to be added: %s", disk_arguments)
coordinator: SystemMonitorCoordinator = SystemMonitorCoordinator(
hass, entry, psutil_wrapper, disk_arguments
hass,
entry,
psutil_wrapper,
disk_arguments,
)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = SystemMonitorData(coordinator, psutil_wrapper)

View File

@@ -8,7 +8,7 @@ import logging
import os
from typing import TYPE_CHECKING, Any, NamedTuple
from psutil import Process
from psutil import AccessDenied, NoSuchProcess, Process
from psutil._common import sdiskusage, shwtemp, snetio, snicaddr, sswap
import psutil_home_assistant as ha_psutil
@@ -40,6 +40,7 @@ class SensorData:
boot_time: datetime
processes: list[Process]
temperatures: dict[str, list[shwtemp]]
process_fds: dict[str, int]
def as_dict(self) -> dict[str, Any]:
"""Return as dict."""
@@ -66,6 +67,7 @@ class SensorData:
"boot_time": str(self.boot_time),
"processes": str(self.processes),
"temperatures": temperatures,
"process_fds": str(self.process_fds),
}
@@ -161,6 +163,7 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
boot_time=_data["boot_time"],
processes=_data["processes"],
temperatures=_data["temperatures"],
process_fds=_data["process_fds"],
)
def update_data(self) -> dict[str, Any]:
@@ -233,6 +236,28 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
)
continue
# Collect file descriptor counts only for selected processes
process_fds: dict[str, int] = {}
for proc in selected_processes:
try:
process_name = proc.name()
# Our sensors are a per-process name aggregation. Not ideal, but the only
# way to do it without user specifying PIDs which are not static.
process_fds[process_name] = (
process_fds.get(process_name, 0) + proc.num_fds()
)
except (NoSuchProcess, AccessDenied):
_LOGGER.warning(
"Failed to get file descriptor count for process %s: access denied or process not found",
proc.pid,
)
except OSError as err:
_LOGGER.warning(
"OS error getting file descriptor count for process %s: %s",
proc.pid,
err,
)
temps: dict[str, list[shwtemp]] = {}
if self.update_subscribers[("temperatures", "")] or self._initial_update:
try:
@@ -250,4 +275,5 @@ class SystemMonitorCoordinator(TimestampDataUpdateCoordinator[SensorData]):
"boot_time": self.boot_time,
"processes": selected_processes,
"temperatures": temps,
"process_fds": process_fds,
}

View File

@@ -37,7 +37,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import slugify
from . import SystemMonitorConfigEntry
from .const import DOMAIN, NET_IO_TYPES
from .binary_sensor import BINARY_SENSOR_DOMAIN
from .const import CONF_PROCESS, DOMAIN, NET_IO_TYPES
from .coordinator import SystemMonitorCoordinator
from .util import get_all_disk_mounts, get_all_network_interfaces, read_cpu_temperature
@@ -125,6 +126,12 @@ def get_ip_address(
return None
def get_process_num_fds(entity: SystemMonitorSensor) -> int | None:
    """Return the number of file descriptors opened by the process.

    Looks up the fd count aggregated per process name by the coordinator;
    returns None when no count is available for this sensor's process name.
    """
    return entity.coordinator.data.process_fds.get(entity.argument)
@dataclass(frozen=True, kw_only=True)
class SysMonitorSensorEntityDescription(SensorEntityDescription):
"""Describes System Monitor sensor entities."""
@@ -376,6 +383,16 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = {
value_fn=lambda entity: entity.coordinator.data.swap.percent,
add_to_update=lambda entity: ("swap", ""),
),
"process_num_fds": SysMonitorSensorEntityDescription(
key="process_num_fds",
translation_key="process_num_fds",
placeholder="process",
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
mandatory_arg=True,
value_fn=get_process_num_fds,
add_to_update=lambda entity: ("processes", ""),
),
}
@@ -482,6 +499,38 @@ async def async_setup_entry(
)
continue
if _type == "process_num_fds":
# Create sensors for processes configured in binary_sensor section
processes = entry.options.get(BINARY_SENSOR_DOMAIN, {}).get(
CONF_PROCESS, []
)
_LOGGER.debug(
"Creating process_num_fds sensors for processes: %s", processes
)
for process in processes:
argument = process
is_enabled = check_legacy_resource(
f"{_type}_{argument}", legacy_resources
)
unique_id = slugify(f"{_type}_{argument}")
loaded_resources.add(unique_id)
_LOGGER.debug(
"Creating process_num_fds sensor: type=%s, process=%s, unique_id=%s, enabled=%s",
_type,
process,
unique_id,
is_enabled,
)
entities.append(
SystemMonitorSensor(
coordinator,
sensor_description,
entry.entry_id,
argument,
is_enabled,
)
)
continue
# Ensure legacy imported disk_* resources are loaded if they are not part
# of mount points automatically discovered
for resource in legacy_resources:

View File

@@ -100,6 +100,9 @@
},
"swap_use_percent": {
"name": "Swap usage"
},
"process_num_fds": {
"name": "Open file descriptors {process}"
}
}
}

View File

@@ -100,8 +100,9 @@ class VeSyncFanHA(VeSyncBaseEntity, FanEntity):
"""Return the currently set speed."""
current_level = self.device.state.fan_level
if self.device.state.mode == VS_FAN_MODE_MANUAL and current_level is not None:
if current_level == 0:
return 0
return ordered_list_item_to_percentage(
self.device.fan_levels, current_level
)

View File

@@ -80,3 +80,760 @@ Both apps are available as Home Assistant add-ons. There are also Docker contain
[running_zwave_js_server]: docs/running_z_wave_js_server.png "Running Z-Wave JS Server"
[//]: # (https://docs.google.com/drawings/d/1YhSVNuss3fa1VFTKQLaACxXg7y6qo742n2oYpdLRs7E/edit?usp=sharing)
## Config Flow
This section documents the Z-Wave JS integration config flow, showing how different entry points and steps interact.
Below is a diagram showing all steps; afterwards, each entry point and step is described in detail.
```mermaid
graph TB
user[user] --> installation_type{installation_type<br/>menu}
installation_type --> intent_recommended[intent_recommended]
installation_type --> intent_custom[intent_custom]
intent_recommended --> on_supervisor[on_supervisor]
intent_custom --> on_supervisor
on_supervisor --> manual[manual]
on_supervisor --> configure_addon_user[configure_addon_user]
on_supervisor --> finish_addon_setup_user[finish_addon_setup_user]
on_supervisor --> install_addon[install_addon]
manual --> create_entry((create entry))
configure_addon_user --> network_type[network_type]
network_type --> configure_security_keys[configure_security_keys]
network_type --> start_addon[start_addon]
configure_security_keys --> start_addon
start_addon --> rf_region[rf_region]
rf_region --> start_addon
start_addon --> start_failed[start_failed]
start_addon --> finish_addon_setup[finish_addon_setup]
finish_addon_setup --> finish_addon_setup_user
finish_addon_setup_user --> create_entry
install_addon --> install_failed[install_failed]
install_addon --> configure_addon[configure_addon]
configure_addon --> configure_addon_user
zeroconf[zeroconf] --> zeroconf_confirm[zeroconf_confirm]
zeroconf_confirm --> manual
usb[usb] --> confirm_usb_migration[confirm_usb_migration]
usb --> installation_type
confirm_usb_migration --> intent_migrate[intent_migrate]
hassio[hassio] --> hassio_confirm[hassio_confirm]
hassio_confirm --> on_supervisor
esphome[esphome] --> installation_type
reconfigure[reconfigure] --> reconfigure_menu{reconfigure<br/>menu}
reconfigure_menu --> intent_reconfigure[intent_reconfigure]
reconfigure_menu --> intent_migrate
intent_reconfigure --> on_supervisor_reconfigure[on_supervisor_reconfigure]
intent_reconfigure --> manual_reconfigure[manual_reconfigure]
on_supervisor_reconfigure --> manual_reconfigure
on_supervisor_reconfigure --> install_addon
on_supervisor_reconfigure --> configure_addon_reconfigure[configure_addon_reconfigure]
configure_addon_reconfigure --> start_addon
configure_addon_reconfigure --> finish_addon_setup_reconfigure[finish_addon_setup_reconfigure]
finish_addon_setup --> finish_addon_setup_reconfigure
finish_addon_setup_reconfigure --> abort_reconfig((abort<br/>reconfigure_successful))
manual_reconfigure --> abort_reconfig
intent_migrate --> backup_nvm[backup_nvm]
backup_nvm --> backup_failed[backup_failed]
backup_nvm --> instruct_unplug[instruct_unplug]
instruct_unplug --> choose_serial_port[choose_serial_port]
instruct_unplug --> start_addon
choose_serial_port --> start_addon
finish_addon_setup --> finish_addon_setup_migrate[finish_addon_setup_migrate]
finish_addon_setup_migrate --> restore_nvm[restore_nvm]
restore_nvm --> restore_failed[restore_failed]
restore_failed --> restore_nvm
restore_nvm --> migration_done[migration_done]
style user fill:#e1f5ff
style zeroconf fill:#e1f5ff
style usb fill:#e1f5ff
style hassio fill:#e1f5ff
style esphome fill:#e1f5ff
style reconfigure fill:#e1f5ff
style create_entry fill:#c8e6c9
style abort_reconfig fill:#c8e6c9
style install_failed fill:#ffcdd2
style start_failed fill:#ffcdd2
style backup_failed fill:#ffcdd2
style migration_done fill:#c8e6c9
```
### Step Descriptions
#### Entry Points
- **`user`**
- Entry point when the user manually adds the integration through the UI
- Checks if running on Home Assistant Supervisor (Supervisor OS/Container)
- If on Supervisor: shows `installation_type` menu
- If not on Supervisor: goes directly to `manual` step
- **`zeroconf`**
- Entry point for Zeroconf/mDNS discovered Z-Wave JS servers
- Extracts `homeId` from discovery properties and sets as unique ID
- Aborts if already configured with same home ID
- Builds WebSocket URL from discovered host:port
- Shows `zeroconf_confirm` to user
- **`usb`**
- Entry point for USB-discovered Z-Wave controllers
- Only works on Home Assistant Supervisor (aborts with `discovery_requires_supervisor` otherwise)
- Allows multiple USB flows in progress (for migration scenarios)
- Filters out 2652 Zigbee sticks that share same VID/PID with some Z-Wave sticks
- Converts device path to `/dev/serial/by-id/` format for stability
- Checks if device is already configured in add-on
- Sets temporary unique ID based on USB identifiers
- If existing entries found: looks for add-on entry to enable migration
- If no existing entries: goes to new setup flow
- **`hassio`**
- Entry point when Z-Wave JS add-on announces itself via Supervisor discovery
- Validates this is the official Z-Wave JS add-on (checks slug)
- Builds WebSocket URL from discovery config
- Gets version info and home ID from server
- Sets unique ID to home ID
- If already configured: updates URL and aborts
- If new: shows `hassio_confirm`
- **`esphome`**
- Entry point for ESPHome devices with Z-Wave over socket support
- Only works on Home Assistant Supervisor
- Special handling if home ID exists in discovery:
- Looks for existing entry with matching home ID
- If entry uses socket connection: updates add-on config with new socket path and reloads
- Sets unique ID to home ID
- Stores socket path from discovery
- Sets `_adapter_discovered` flag to skip manual device selection
- Goes to `installation_type` menu
- **`reconfigure`**
- Entry point when user reconfigures existing config entry
- Stores reference to config entry being reconfigured
- Shows menu with two options:
- `intent_reconfigure`: Change connection settings
- `intent_migrate`: Migrate to different controller hardware
#### Menu Steps
- **`installation_type`**
- Menu shown on Supervisor when setting up integration
- Options:
- `intent_recommended`: Guided setup with add-on (auto-configures everything)
- `intent_custom`: Advanced setup (choose add-on or manual server)
#### Intent Steps
- **`intent_recommended`**
- User selected recommended installation
- Sets `_recommended_install` flag for automatic configuration
- Forces add-on usage: calls `on_supervisor` with `use_addon=True`
- **`intent_custom`**
- User selected custom installation
- If adapter was discovered (USB/ESPHome): forces add-on usage
- If no adapter discovered: goes to `on_supervisor` to ask user preference
- **`intent_reconfigure`**
- User wants to reconfigure connection settings (not migrate hardware)
- Checks if on Supervisor:
- Yes: goes to `on_supervisor_reconfigure`
- No: goes to `manual_reconfigure`
- **`intent_migrate`**
- User wants to migrate to different Z-Wave controller hardware
- Validates requirements:
- Adapter must be discovered OR existing entry must use add-on
- Config entry must be loaded (needs access to driver)
- Controller SDK version must be >= 6.61 (older versions don't support NVM export)
- Sets `_migrating` flag
- Starts migration: goes to `backup_nvm`
#### Configuration Steps - Supervisor Add-on Path
- **`on_supervisor`**
- Asks user if they want to use the Z-Wave JS add-on or manual server
- If user_input is None: shows form with checkbox for `use_addon` (default: True)
- If `use_addon=False`: goes to `manual` step
- If `use_addon=True`:
- Gets add-on info and checks state
- If add-on running: loads config from add-on, goes to `finish_addon_setup_user`
- If add-on not running: goes to `configure_addon_user`
- If add-on not installed: goes to `install_addon`
- **`configure_addon_user`**
- Collects USB path or ESPHome socket path for add-on
- If adapter was discovered: skips asking, uses stored path
- If no adapter discovered: shows form with:
- Optional USB path dropdown (populated from available USB ports)
- Optional socket path text field (for ESPHome or remote sockets)
- Goes to `network_type`
- **`network_type`**
- Asks if creating new Z-Wave network or using existing network
- If recommended install: automatically selects "new" (generates new keys)
- Shows form with options:
- `new`: Generate new security keys (blank keys)
- `existing`: Import existing network keys
- If new: clears all security keys and goes to `start_addon`
- If existing: goes to `configure_security_keys`
- **`configure_security_keys`**
- Collects security keys for existing Z-Wave network
- Shows form with optional fields for:
- S0 Legacy Key (32 hex chars)
- S2 Unauthenticated Key (32 hex chars)
- S2 Authenticated Key (32 hex chars)
- S2 Access Control Key (32 hex chars)
- Long Range S2 Authenticated Key (32 hex chars)
- Long Range S2 Access Control Key (32 hex chars)
- Pre-populates with existing add-on config if available
- Stores keys in config flow state
- Goes to `start_addon`
- **`rf_region`**
- Asks user to select RF region for Z-Wave controller
- Only shown if:
- Home Assistant country is not set
- Add-on RF region is not configured or set to "Automatic"
- Shows dropdown with regions:
- Australia/New Zealand, China, Europe, Hong Kong, India, Israel, Japan, Korea, Russia, USA
- Stores selected region in add-on config updates
- Returns to `start_addon`
#### Configuration Steps - Manual Server Path
- **`manual`**
- Collects WebSocket URL for external Z-Wave JS server
- Shows form with text field for URL (default: `ws://localhost:3000`)
- Validates input:
- URL must start with `ws://` or `wss://`
- Attempts connection to get version info
- On success:
- Sets unique ID to home ID from server
- If already configured: updates URL and aborts
- If new: creates config entry
- On error: shows error message and re-displays form
#### Progress Steps
- **`install_addon`**
- Progress step that installs Z-Wave JS add-on
- Creates background task to install add-on via Supervisor API
- Shows progress spinner to user
- On success:
- Sets `integration_created_addon` flag (for cleanup on removal)
- Goes to `configure_addon`
- On failure: goes to `install_failed`
- **`install_failed`**
- Add-on installation failed
- Aborts flow with reason `addon_install_failed`
- **`start_addon`**
- Progress step that starts or restarts Z-Wave JS add-on
- First checks if RF region needs to be selected:
- If country not set AND RF region not configured: goes to `rf_region`
- If there are pending add-on config updates: applies them before starting
- Creates background task (`_async_start_addon`):
- Starts or restarts add-on via Supervisor API
- Polls for up to 200 seconds (40 rounds × 5 seconds) waiting for server to respond
- Gets WebSocket URL from add-on discovery info
- Validates connection by getting version info
- On success: goes to `finish_addon_setup`
- On failure: goes to `start_failed`
- **`start_failed`**
- Add-on start/restart failed
- If migrating: aborts with `addon_start_failed`
- If reconfiguring: calls `async_revert_addon_config` to restore original config
- Otherwise: aborts with `addon_start_failed`
- **`backup_nvm`**
- Progress step that backs up Z-Wave controller NVM (non-volatile memory)
- Creates background task (`_async_backup_network`):
- Gets driver controller from config entry runtime data
- Registers progress callback to forward backup progress to UI (0-100%)
- Calls `controller.async_backup_nvm_raw()` to get raw NVM binary data
- Saves backup to file: `~/.homeassistant/zwavejs_nvm_backup_YYYY-MM-DD_HH-MM-SS.bin`
- On success: goes to `instruct_unplug`
- On failure: goes to `backup_failed`
- **`backup_failed`**
- NVM backup failed
- Aborts migration with reason `backup_failed`
- **`restore_nvm`**
- Progress step that restores NVM backup to new controller
- Creates background task (`_async_restore_network_backup`):
- Sets `keep_old_devices` flag to preserve device customizations
- Reloads config entry to reconnect to new controller
- Registers progress callbacks for convert (50%) and restore (50%) phases
- Calls `controller.async_restore_nvm()` with backup data
- Waits for driver ready event (with timeout)
- Gets new version info and updates config entry unique ID to new home ID
- Reloads entry again to clean up old controller device
- On success: goes to `migration_done`
- On failure: goes to `restore_failed`
- **`restore_failed`**
- NVM restore failed
- Shows form with:
- Error message
- Backup file path
- Download link for backup file (base64 encoded)
- Retry button
- If user retries: goes back to `restore_nvm`
#### Finish Steps
- **`configure_addon`**
- Router step that delegates to appropriate addon configuration
- If reconfiguring: goes to `configure_addon_reconfigure`
- Otherwise: goes to `configure_addon_user`
- **`finish_addon_setup`**
- Router step that delegates to appropriate finish logic
- If migrating: goes to `finish_addon_setup_migrate`
- If reconfiguring: goes to `finish_addon_setup_reconfigure`
- Otherwise: goes to `finish_addon_setup_user`
- **`finish_addon_setup_user`**
- Finalizes setup for new config entry
- Gets add-on discovery info if WebSocket URL not set
- Gets version info from server if not already fetched
- Sets unique ID to home ID
- For USB discovery: updates unique ID from temporary USB-based ID to home ID
- Checks if already configured: updates URL/paths and aborts
- Creates config entry with all collected data:
- WebSocket URL
- USB path
- Socket path
- All security keys
- Add-on flags
- Aborts any other in-progress flows
#### Confirmation Steps
- **`zeroconf_confirm`**
- Confirms adding Zeroconf-discovered server
- Shows form with home ID and WebSocket URL
- On confirmation: goes to `manual` with pre-filled URL
- **`confirm_usb_migration`**
- Confirms migrating to newly discovered USB controller
- Shows form with USB device title
- On confirmation: goes to `intent_migrate`
- **`hassio_confirm`**
- Confirms adding add-on discovered server
- Shows simple confirmation form
- On confirmation: goes to `on_supervisor` with `use_addon=True`
- **`instruct_unplug`**
- Instructs user to unplug old controller after backup
- Unloads config entry before asking (to release USB port)
- Shows form with backup file path
- On confirmation:
- If adapter was discovered: goes to `start_addon` (path already known)
- If adapter not discovered: goes to `choose_serial_port`
- **`choose_serial_port`**
- Shows available serial ports for new controller
- Gets list of USB ports
- Removes old controller path from list
- Adds "Use Socket" option for ESPHome/remote connections
- Shows form with:
- Optional USB path dropdown
- Optional socket path text field
- Stores selected path in add-on config updates
- Goes to `start_addon`
#### Reconfiguration Steps
- **`on_supervisor_reconfigure`**
- Asks if user wants add-on or manual server during reconfigure
- Shows form with `use_addon` checkbox (pre-filled with current value)
- If `use_addon=False`:
- If was using add-on: unloads entry and stops add-on
- Goes to `manual_reconfigure`
- If `use_addon=True`:
- If add-on not installed: goes to `install_addon`
- If add-on installed: goes to `configure_addon_reconfigure`
- **`manual_reconfigure`**
- Collects new WebSocket URL when reconfiguring manual setup
- Shows form with URL field (pre-filled with current URL)
- Validates connection and gets version info
- Verifies home ID matches existing config entry (prevents wrong device)
- Updates config entry with new URL
- Disables add-on handling flags
- Aborts with `reconfigure_successful`
- **`configure_addon_reconfigure`**
- Updates add-on configuration during reconfigure
- Gets current add-on config
- Shows form with:
- USB path dropdown (including "Use Socket" option)
- Socket path text field
- All six security key fields
- Pre-fills with current add-on config values
- On submit:
- Updates add-on config with new values
- If add-on running and no restart needed: goes to `finish_addon_setup_reconfigure`
- Otherwise: unloads entry and goes to `start_addon`
- **`finish_addon_setup_reconfigure`**
- Finalizes reconfiguration
- If there's a pending revert reason: reverts config and aborts
- Gets WebSocket URL from add-on discovery
- Gets version info from server
- Verifies home ID matches (prevents wrong device)
- Updates config entry with all new values
- Reloads config entry
- Aborts with `reconfigure_successful`
- On error: calls `async_revert_addon_config` to restore original config
#### Migration Finish Steps
- **`finish_addon_setup_migrate`**
- Finalizes migration to new controller
- Updates config entry with:
- New WebSocket URL
- New USB/socket path
- Same security keys
- New home ID as unique ID
- Note: Does NOT reload entry here (done in restore step)
- Goes to `restore_nvm`
- **`migration_done`**
- Migration completed successfully
- Aborts with `migration_successful`
### User Entry Point
Initial setup flow when the user manually adds the integration:
```mermaid
graph TB
user[user] --> hassio_check{Is Supervisor?}
hassio_check -->|Yes| installation_type{installation_type<br/>menu}
hassio_check -->|No| manual[manual]
installation_type -->|Recommended| intent_recommended[intent_recommended]
installation_type -->|Custom| intent_custom[intent_custom]
intent_recommended --> use_addon_true[on_supervisor<br/>use_addon=True]
intent_custom --> adapter_check{Adapter<br/>discovered?}
adapter_check -->|Yes| use_addon_true
adapter_check -->|No| on_supervisor[on_supervisor<br/>ask use_addon]
on_supervisor -->|use_addon=False| manual
on_supervisor -->|use_addon=True| use_addon_true
use_addon_true --> addon_state{Add-on state?}
addon_state -->|Running| finish_addon_setup_user[finish_addon_setup_user]
addon_state -->|Not Running| configure_addon_user[configure_addon_user]
addon_state -->|Not Installed| install_addon[install_addon]
install_addon -->|Success| configure_addon_user
install_addon -->|Fail| install_failed[install_failed<br/>abort]
configure_addon_user --> adapter_discovered{Adapter<br/>discovered?}
adapter_discovered -->|Yes| network_type[network_type]
adapter_discovered -->|No| ask_usb[Ask USB/Socket path]
ask_usb --> network_type
network_type --> recommended{Recommended<br/>install?}
recommended -->|Yes| start_addon[start_addon]
recommended -->|No| ask_network[Ask network type]
ask_network -->|New| start_addon
ask_network -->|Existing| configure_security_keys[configure_security_keys]
configure_security_keys --> start_addon
start_addon --> rf_region_check{Country not set<br/>& RF region not<br/>configured?}
rf_region_check -->|Yes| rf_region[rf_region]
rf_region_check -->|No| start_progress[Start add-on]
rf_region --> start_progress
start_progress -->|Success| finish_addon_setup_user
start_progress -->|Fail| start_failed[start_failed<br/>abort]
finish_addon_setup_user --> finalize[Get version info<br/>Set unique ID<br/>Create entry]
finalize --> create_entry((create entry))
manual --> ask_url[Ask WebSocket URL<br/>Validate connection]
ask_url -->|Success| create_entry
ask_url -->|Fail| ask_url
style user fill:#e1f5ff
style create_entry fill:#c8e6c9
style install_failed fill:#ffcdd2
style start_failed fill:#ffcdd2
```
### USB Discovery Entry Point
Flow triggered when a USB Z-Wave stick is discovered:
```mermaid
graph TB
usb[usb discovery] --> supervisor_check{Is Supervisor?}
supervisor_check -->|No| abort_supervisor[abort<br/>discovery_requires_supervisor]
supervisor_check -->|Yes| flow_check{Non-USB flows<br/>in progress?}
flow_check -->|Yes| abort_progress[abort<br/>already_in_progress]
flow_check -->|No| existing_check{Existing<br/>entries?}
existing_check -->|No| setup_temp[Set temp unique ID<br/>Store USB path]
existing_check -->|Yes| find_addon_entry{Entry with<br/>use_addon=True<br/>exists?}
find_addon_entry -->|No| abort_addon_req[abort<br/>addon_required]
find_addon_entry -->|Yes| check_configured{Device already<br/>configured in<br/>add-on?}
check_configured -->|Yes| abort_configured[abort<br/>already_configured]
check_configured -->|No| setup_temp
setup_temp --> entries_exist{Existing<br/>entries?}
entries_exist -->|Yes| confirm_usb_migration[confirm_usb_migration]
entries_exist -->|No| installation_type{installation_type<br/>menu}
confirm_usb_migration -->|Confirm| intent_migrate[intent_migrate]
confirm_usb_migration -->|Cancel| abort_user[User aborts]
installation_type -->|Recommended| intent_recommended[intent_recommended]
installation_type -->|Custom| intent_custom[intent_custom]
intent_recommended --> on_supervisor[on_supervisor<br/>use_addon=True]
intent_custom --> on_supervisor
on_supervisor --> addon_state{Add-on state?}
addon_state -->|Running| finish_addon_setup_user[finish_addon_setup_user]
addon_state -->|Not Running| network_type[network_type]
addon_state -->|Not Installed| install_addon[install_addon]
install_addon --> configure_addon_user[configure_addon_user]
configure_addon_user --> network_type
network_type --> recommended{Recommended?}
recommended -->|Yes| start_addon[start_addon]
recommended -->|No| ask_network[Ask network type]
ask_network -->|New| start_addon
ask_network -->|Existing| configure_security_keys[configure_security_keys]
configure_security_keys --> start_addon
start_addon --> rf_check{Country not set<br/>& RF region not<br/>configured?}
rf_check -->|Yes| rf_region[rf_region]
rf_check -->|No| start_progress[Start add-on]
rf_region --> start_progress
start_progress --> finish_addon_setup[finish_addon_setup]
finish_addon_setup --> finish_addon_setup_user
finish_addon_setup_user --> finalize[Update unique ID<br/>Create entry]
finalize --> create_entry((create entry))
intent_migrate --> migration_flow[See Migration flow]
style usb fill:#e1f5ff
style create_entry fill:#c8e6c9
style abort_supervisor fill:#ffcdd2
style abort_progress fill:#ffcdd2
style abort_addon_req fill:#ffcdd2
style abort_configured fill:#ffcdd2
style migration_flow fill:#fff9c4
```
### Zeroconf Discovery Entry Point
Flow triggered when Z-Wave JS server is discovered via Zeroconf:
```mermaid
graph TB
zeroconf[zeroconf discovery] --> setup[Extract home_id<br/>Set unique ID<br/>Store WebSocket URL]
setup --> check_configured{Already<br/>configured?}
check_configured -->|Yes| abort_configured[abort<br/>already_configured]
check_configured -->|No| zeroconf_confirm[zeroconf_confirm]
zeroconf_confirm -->|Confirm| manual[manual<br/>with stored URL]
zeroconf_confirm -->|Cancel| abort_user[User aborts]
manual --> validate[Validate connection<br/>Get version info]
validate -->|Success| create_entry((create entry))
validate -->|Fail| manual
style zeroconf fill:#e1f5ff
style create_entry fill:#c8e6c9
style abort_configured fill:#ffcdd2
```
### Add-on Discovery Entry Point (hassio)
Flow triggered when the Z-Wave JS add-on reports its availability:
```mermaid
graph TB
hassio[hassio discovery] --> flow_check{Other flows<br/>in progress?}
flow_check -->|Yes| abort_progress[abort<br/>already_in_progress]
flow_check -->|No| slug_check{Is Z-Wave JS<br/>add-on?}
slug_check -->|No| abort_slug[abort<br/>not_zwave_js_addon]
slug_check -->|Yes| validate[Build WebSocket URL<br/>Get version info<br/>Set unique ID]
validate -->|Fail| abort_connect[abort<br/>cannot_connect]
validate -->|Success| check_configured{Already<br/>configured?}
check_configured -->|Yes| update_abort[Update URL<br/>abort already_configured]
check_configured -->|No| hassio_confirm[hassio_confirm]
hassio_confirm -->|Confirm| on_supervisor[on_supervisor<br/>use_addon=True]
hassio_confirm -->|Cancel| abort_user[User aborts]
on_supervisor --> addon_state{Add-on state?}
addon_state -->|Running| finish_addon_setup_user[finish_addon_setup_user]
addon_state -->|Not Running| configure_addon_user[configure_addon_user]
addon_state -->|Not Installed| install_addon[install_addon]
install_addon --> configure_addon_user
configure_addon_user --> network_type[network_type]
network_type --> start_addon[start_addon]
start_addon --> finish_addon_setup[finish_addon_setup]
finish_addon_setup --> finish_addon_setup_user
finish_addon_setup_user --> create_entry((create entry))
style hassio fill:#e1f5ff
style create_entry fill:#c8e6c9
style abort_progress fill:#ffcdd2
style abort_slug fill:#ffcdd2
style abort_connect fill:#ffcdd2
```
### ESPHome Discovery Entry Point
Flow triggered when an ESPHome device with Z-Wave support is discovered:
```mermaid
graph TB
esphome[esphome discovery] --> supervisor_check{Is Supervisor?}
supervisor_check -->|No| abort_hassio[abort<br/>not_hassio]
supervisor_check -->|Yes| match_check{Home ID exists<br/>& matching entry<br/>with socket?}
match_check -->|Yes| update_reload[Update add-on config<br/>Reload entry]
match_check -->|No| setup_discovery[Set unique ID<br/>Store socket path<br/>Set adapter_discovered]
update_reload --> abort_configured[abort<br/>already_configured]
setup_discovery --> installation_type{installation_type<br/>menu}
installation_type -->|Recommended| intent_recommended[intent_recommended]
installation_type -->|Custom| intent_custom[intent_custom]
intent_recommended --> on_supervisor[on_supervisor<br/>use_addon=True]
intent_custom --> on_supervisor
on_supervisor --> addon_state{Add-on state?}
addon_state -->|Running| finish_addon_setup_user[finish_addon_setup_user]
addon_state -->|Not Running| network_type[network_type]
addon_state -->|Not Installed| install_addon[install_addon]
install_addon --> configure_addon_user[configure_addon_user]
configure_addon_user --> network_type
network_type --> start_addon[start_addon]
start_addon --> finish_addon_setup[finish_addon_setup]
finish_addon_setup --> finish_addon_setup_user
finish_addon_setup_user --> unique_id_check{Unique ID set<br/>& matching USB<br/>entry?}
unique_id_check -->|Yes| update_reload
unique_id_check -->|No| create_entry((create entry))
style esphome fill:#e1f5ff
style create_entry fill:#c8e6c9
style abort_hassio fill:#ffcdd2
style abort_configured fill:#ffcdd2
```
### Reconfigure Entry Point
Flow triggered when user reconfigures an existing entry:
```mermaid
graph TB
reconfigure[reconfigure] --> reconfigure_menu{reconfigure<br/>menu}
reconfigure_menu -->|Reconfigure| intent_reconfigure[intent_reconfigure]
reconfigure_menu -->|Migrate| intent_migrate[intent_migrate]
intent_reconfigure --> supervisor_check{Is Supervisor?}
supervisor_check -->|No| manual_reconfigure[manual_reconfigure]
supervisor_check -->|Yes| on_supervisor_reconfigure[on_supervisor_reconfigure]
on_supervisor_reconfigure --> ask_use_addon{Use add-on?}
ask_use_addon -->|No & was using| stop_addon[Unload entry<br/>Stop add-on]
ask_use_addon -->|No| manual_reconfigure
stop_addon -->|Fail| abort_stop[abort<br/>addon_stop_failed]
stop_addon -->|Success| manual_reconfigure
ask_use_addon -->|Yes| addon_state{Add-on state?}
addon_state -->|Not Installed| install_addon[install_addon]
addon_state -->|Installed| configure_addon_reconfigure[configure_addon_reconfigure]
install_addon --> configure_addon_reconfigure
configure_addon_reconfigure --> update_config[Ask USB/Socket/Keys<br/>Update add-on config]
update_config --> running_check{Add-on running<br/>& no restart<br/>needed?}
running_check -->|Yes| finish_addon_setup_reconfigure[finish_addon_setup_reconfigure]
running_check -->|No| unload_start[Unload entry if needed<br/>Start add-on]
unload_start --> rf_check{Country not set<br/>& RF region not<br/>configured?}
rf_check -->|Yes| rf_region[rf_region]
rf_check -->|No| start_addon[start_addon]
rf_region --> start_addon
start_addon -->|Fail| revert_start[Revert config<br/>abort addon_start_failed]
start_addon -->|Success| finish_addon_setup[finish_addon_setup]
finish_addon_setup --> finish_addon_setup_reconfigure
finish_addon_setup_reconfigure --> validate[Get WebSocket URL<br/>Get version info<br/>Check home ID]
validate -->|Cannot connect| revert_connect[Revert config<br/>abort cannot_connect]
validate -->|Wrong device| revert_device[Revert config<br/>abort different_device]
validate -->|Success| update_reload[Update entry<br/>Reload entry]
update_reload --> abort_success[abort<br/>reconfigure_successful]
manual_reconfigure --> ask_validate[Ask WebSocket URL<br/>Validate connection]
ask_validate -->|Fail| ask_validate
ask_validate -->|Success| check_home_id{Home ID<br/>matches?}
check_home_id -->|No| abort_different[abort<br/>different_device]
check_home_id -->|Yes| update_manual[Update entry<br/>Disable add-on]
update_manual --> abort_success
style reconfigure fill:#e1f5ff
style abort_success fill:#c8e6c9
style abort_stop fill:#ffcdd2
style abort_different fill:#ffcdd2
style revert_start fill:#ffcdd2
style revert_connect fill:#ffcdd2
style revert_device fill:#ffcdd2
```

View File

@@ -1,12 +1,27 @@
"""Tests for Shelly valve platform."""
from copy import deepcopy
from unittest.mock import Mock
from aioshelly.const import MODEL_GAS
import pytest
from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE
from homeassistant.components.shelly.const import (
MODEL_FRANKEVER_WATER_VALVE,
MODEL_NEO_WATER_VALVE,
)
from homeassistant.components.valve import (
ATTR_CURRENT_POSITION,
ATTR_POSITION,
DOMAIN as VALVE_DOMAIN,
ValveState,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_CLOSE_VALVE,
SERVICE_OPEN_VALVE,
SERVICE_SET_VALVE_POSITION,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_registry import EntityRegistry
@@ -64,3 +79,157 @@ async def test_block_device_gas_valve(
assert (state := hass.states.get(entity_id))
assert state.state == ValveState.CLOSED
async def test_rpc_water_valve(
    hass: HomeAssistant,
    entity_registry: EntityRegistry,
    mock_rpc_device: Mock,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Test the valve entity of a Shelly FrankEver water valve (RPC)."""
    valve_entity_id = "valve.test_name"

    device_config = deepcopy(mock_rpc_device.config)
    device_config["number:200"] = {
        "name": "Position",
        "min": 0,
        "max": 100,
        "meta": {"ui": {"step": 10, "view": "slider", "unit": "%"}},
        "role": "position",
    }
    monkeypatch.setattr(mock_rpc_device, "config", device_config)

    device_status = deepcopy(mock_rpc_device.status)
    device_status["number:200"] = {"value": 0}
    monkeypatch.setattr(mock_rpc_device, "status", device_status)

    await init_integration(hass, 3, model=MODEL_FRANKEVER_WATER_VALVE)

    def push_position(value: int) -> None:
        """Simulate the device reporting a new valve position."""
        device_status["number:200"] = {"value": value}
        monkeypatch.setattr(mock_rpc_device, "status", device_status)
        mock_rpc_device.mock_update()

    assert (entry := entity_registry.async_get(valve_entity_id))
    assert entry.unique_id == "123456789ABC-number:200-water_valve"
    assert (state := hass.states.get(valve_entity_id))
    assert state.state == ValveState.CLOSED

    # Opening drives the position number to its maximum.
    await hass.services.async_call(
        VALVE_DOMAIN,
        SERVICE_OPEN_VALVE,
        {ATTR_ENTITY_ID: valve_entity_id},
        blocking=True,
    )
    mock_rpc_device.number_set.assert_called_once_with(200, 100)
    push_position(100)
    await hass.async_block_till_done()
    assert (state := hass.states.get(valve_entity_id))
    assert state.state == ValveState.OPEN

    # Closing drives the position number back to zero.
    mock_rpc_device.number_set.reset_mock()
    await hass.services.async_call(
        VALVE_DOMAIN,
        SERVICE_CLOSE_VALVE,
        {ATTR_ENTITY_ID: valve_entity_id},
        blocking=True,
    )
    mock_rpc_device.number_set.assert_called_once_with(200, 0)
    push_position(0)
    await hass.async_block_till_done()
    assert (state := hass.states.get(valve_entity_id))
    assert state.state == ValveState.CLOSED

    # A partial position maps straight through to the number component.
    mock_rpc_device.number_set.reset_mock()
    await hass.services.async_call(
        VALVE_DOMAIN,
        SERVICE_SET_VALVE_POSITION,
        {ATTR_ENTITY_ID: valve_entity_id, ATTR_POSITION: 50},
        blocking=True,
    )
    mock_rpc_device.number_set.assert_called_once_with(200, 50)
    push_position(50)
    await hass.async_block_till_done()
    assert (state := hass.states.get(valve_entity_id))
    assert state.state == ValveState.OPEN
    assert state.attributes.get(ATTR_CURRENT_POSITION) == 50
async def test_rpc_neo_water_valve(
    hass: HomeAssistant,
    entity_registry: EntityRegistry,
    mock_rpc_device: Mock,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Test the valve entity of a Shelly NEO water valve (RPC)."""
    valve_entity_id = "valve.test_name"

    device_config = deepcopy(mock_rpc_device.config)
    device_config["boolean:200"] = {
        "name": "State",
        "meta": {"ui": {"view": "toggle"}},
        "role": "state",
    }
    monkeypatch.setattr(mock_rpc_device, "config", device_config)

    device_status = deepcopy(mock_rpc_device.status)
    device_status["boolean:200"] = {"value": False}
    monkeypatch.setattr(mock_rpc_device, "status", device_status)

    await init_integration(hass, 3, model=MODEL_NEO_WATER_VALVE)

    def push_state(value: bool) -> None:
        """Simulate the device reporting a new open/closed state."""
        device_status["boolean:200"] = {"value": value}
        monkeypatch.setattr(mock_rpc_device, "status", device_status)
        mock_rpc_device.mock_update()

    assert (entry := entity_registry.async_get(valve_entity_id))
    assert entry.unique_id == "123456789ABC-boolean:200-neo_water_valve"
    assert (state := hass.states.get(valve_entity_id))
    assert state.state == ValveState.CLOSED

    # Opening sets the boolean component to True.
    await hass.services.async_call(
        VALVE_DOMAIN,
        SERVICE_OPEN_VALVE,
        {ATTR_ENTITY_ID: valve_entity_id},
        blocking=True,
    )
    mock_rpc_device.boolean_set.assert_called_once_with(200, True)
    push_state(True)
    await hass.async_block_till_done()
    assert (state := hass.states.get(valve_entity_id))
    assert state.state == ValveState.OPEN

    # Closing sets the boolean component back to False.
    mock_rpc_device.boolean_set.reset_mock()
    await hass.services.async_call(
        VALVE_DOMAIN,
        SERVICE_CLOSE_VALVE,
        {ATTR_ENTITY_ID: valve_entity_id},
        blocking=True,
    )
    mock_rpc_device.boolean_set.assert_called_once_with(200, False)
    push_state(False)
    await hass.async_block_till_done()
    assert (state := hass.states.get(valve_entity_id))
    assert state.state == ValveState.CLOSED

View File

@@ -27,12 +27,20 @@ def mock_sys_platform() -> Generator[None]:
class MockProcess(Process):
"""Mock a Process class."""
def __init__(self, name: str, ex: bool = False) -> None:
def __init__(
self,
name: str,
ex: bool = False,
num_fds: int | None = None,
raise_os_error: bool = False,
) -> None:
"""Initialize the process."""
super().__init__(1)
self._name = name
self._ex = ex
self._create_time = 1708700400
self._num_fds = num_fds
self._raise_os_error = raise_os_error
def name(self):
"""Return a name."""
@@ -40,6 +48,25 @@ class MockProcess(Process):
raise NoSuchProcess(1, self._name)
return self._name
def num_fds(self):
    """Return the number of file descriptors opened by this process.

    Honors the mock's failure switches first, then an explicit override,
    and finally falls back to a per-process default.
    """
    if self._ex:
        raise NoSuchProcess(1, self._name)
    if self._raise_os_error:
        raise OSError("Permission denied")
    # An explicitly configured count always wins.
    if self._num_fds is not None:
        return self._num_fds
    # Distinct defaults per process name so tests can tell them apart.
    defaults = {"python3": 42, "pip": 15}
    return defaults.get(self._name, 10)
@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:

View File

@@ -22,6 +22,7 @@
}),
'load': '(1, 2, 3)',
'memory': 'VirtualMemory(total=104857600, available=41943040, percent=40.0, used=62914560, free=31457280)',
'process_fds': "{'python3': 42, 'pip': 15}",
'processes': "[tests.components.systemmonitor.conftest.MockProcess(pid=1, name='python3', status='sleeping', started='2024-02-23 15:00:00'), tests.components.systemmonitor.conftest.MockProcess(pid=1, name='pip', status='sleeping', started='2024-02-23 15:00:00')]",
'swap': 'sswap(total=104857600, used=62914560, free=41943040, percent=60.0, sin=1, sout=1)',
'temperatures': dict({
@@ -79,6 +80,7 @@
'io_counters': None,
'load': '(1, 2, 3)',
'memory': 'VirtualMemory(total=104857600, available=41943040, percent=40.0, used=62914560, free=31457280)',
'process_fds': "{'python3': 42, 'pip': 15}",
'processes': "[tests.components.systemmonitor.conftest.MockProcess(pid=1, name='python3', status='sleeping', started='2024-02-23 15:00:00'), tests.components.systemmonitor.conftest.MockProcess(pid=1, name='pip', status='sleeping', started='2024-02-23 15:00:00')]",
'swap': 'sswap(total=104857600, used=62914560, free=41943040, percent=60.0, sin=1, sout=1)',
'temperatures': dict({

View File

@@ -114,16 +114,6 @@
# name: test_sensor[System Monitor Last boot - state]
'2024-02-24T15:00:00+00:00'
# ---
# name: test_sensor[System Monitor Load (15 min) - attributes]
ReadOnlyDict({
'friendly_name': 'System Monitor Load (15 min)',
'icon': 'mdi:cpu-64-bit',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
})
# ---
# name: test_sensor[System Monitor Load (15 min) - state]
'3'
# ---
# name: test_sensor[System Monitor Load (1 min) - attributes]
ReadOnlyDict({
'friendly_name': 'System Monitor Load (1 min)',
@@ -134,6 +124,16 @@
# name: test_sensor[System Monitor Load (1 min) - state]
'1'
# ---
# name: test_sensor[System Monitor Load (15 min) - attributes]
ReadOnlyDict({
'friendly_name': 'System Monitor Load (15 min)',
'icon': 'mdi:cpu-64-bit',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
})
# ---
# name: test_sensor[System Monitor Load (15 min) - state]
'3'
# ---
# name: test_sensor[System Monitor Load (5 min) - attributes]
ReadOnlyDict({
'friendly_name': 'System Monitor Load (5 min)',
@@ -264,6 +264,24 @@
# name: test_sensor[System Monitor Network throughput out eth1 - state]
'unknown'
# ---
# name: test_sensor[System Monitor Open file descriptors pip - attributes]
ReadOnlyDict({
'friendly_name': 'System Monitor Open file descriptors pip',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
})
# ---
# name: test_sensor[System Monitor Open file descriptors pip - state]
'15'
# ---
# name: test_sensor[System Monitor Open file descriptors python3 - attributes]
ReadOnlyDict({
'friendly_name': 'System Monitor Open file descriptors python3',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
})
# ---
# name: test_sensor[System Monitor Open file descriptors python3 - state]
'42'
# ---
# name: test_sensor[System Monitor Packets in eth0 - attributes]
ReadOnlyDict({
'friendly_name': 'System Monitor Packets in eth0',

View File

@@ -18,6 +18,8 @@ from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from .conftest import MockProcess
from tests.common import MockConfigEntry, async_fire_time_changed
@@ -420,6 +422,107 @@ async def test_cpu_percentage_is_zero_returns_unknown(
assert cpu_sensor.state == "15"
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_python3_num_fds(
    hass: HomeAssistant,
    mock_psutil: Mock,
    mock_os: Mock,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test the python3 open file descriptors sensor.

    Verifies the initial value reported by the mocked psutil process and
    that the sensor follows a changed fd count on the next update cycle.

    Fix: drop the ``entity_registry`` and ``snapshot`` fixture parameters,
    which were requested but never used in the test body.
    """
    mock_config_entry = MockConfigEntry(
        title="System Monitor",
        domain=DOMAIN,
        data={},
        options={
            "binary_sensor": {"process": ["python3", "pip"]},
            "resources": [
                "disk_use_percent_/",
                "disk_use_percent_/home/notexist/",
                "memory_free_",
                "network_out_eth0",
                "process_num_fds_python3",
            ],
        },
    )
    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    num_fds_sensor = hass.states.get(
        "sensor.system_monitor_open_file_descriptors_python3"
    )
    assert num_fds_sensor is not None
    # MockProcess reports 42 fds for "python3" by default.
    assert num_fds_sensor.state == "42"
    assert num_fds_sensor.attributes == {
        "state_class": "measurement",
        "friendly_name": "System Monitor Open file descriptors python3",
    }

    # Replace the process with one reporting a different fd count.
    _process = MockProcess("python3", num_fds=5)
    assert _process.num_fds() == 5
    mock_psutil.process_iter.return_value = [_process]

    freezer.tick(timedelta(minutes=1))
    async_fire_time_changed(hass)
    await hass.async_block_till_done(wait_background_tasks=True)

    num_fds_sensor = hass.states.get(
        "sensor.system_monitor_open_file_descriptors_python3"
    )
    assert num_fds_sensor is not None
    assert num_fds_sensor.state == "5"
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_python3_num_fds_os_error(
    hass: HomeAssistant,
    mock_psutil: Mock,
    mock_os: Mock,
    freezer: FrozenDateTimeFactory,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test python3 open file descriptors sensor handles OSError gracefully."""
    sensor_id = "sensor.system_monitor_open_file_descriptors_python3"

    config_entry = MockConfigEntry(
        title="System Monitor",
        domain=DOMAIN,
        data={},
        options={
            "binary_sensor": {"process": ["python3", "pip"]},
            "resources": [
                "process_num_fds_python3",
            ],
        },
    )
    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    num_fds_sensor = hass.states.get(sensor_id)
    assert num_fds_sensor is not None
    assert num_fds_sensor.state == "42"

    # Swap in a process whose num_fds() raises OSError (permission denied).
    mock_psutil.process_iter.return_value = [
        MockProcess("python3", raise_os_error=True)
    ]
    freezer.tick(timedelta(minutes=1))
    async_fire_time_changed(hass)
    await hass.async_block_till_done(wait_background_tasks=True)

    # The sensor survives the error and reports an unknown value.
    num_fds_sensor = hass.states.get(sensor_id)
    assert num_fds_sensor is not None
    assert num_fds_sensor.state == STATE_UNKNOWN
    # The failure is surfaced in the log.
    assert "OS error getting file descriptor count for process 1" in caplog.text
async def test_remove_obsolete_entities(
hass: HomeAssistant,
mock_psutil: Mock,
@@ -440,7 +543,7 @@ async def test_remove_obsolete_entities(
mock_added_config_entry.entry_id
)
)
== 37
== 39
)
entity_registry.async_update_entity(
@@ -481,7 +584,7 @@ async def test_remove_obsolete_entities(
mock_added_config_entry.entry_id
)
)
== 38
== 40
)
assert (

View File

@@ -147,17 +147,6 @@ def mock_multiple_device_responses(
)
def mock_air_purifier_400s_update_response(aioclient_mock: AiohttpClientMocker) -> None:
    """Build a response for the Helpers.call_api method for air_purifier_400s with updated data."""
    # Register the updated-data fixture on every endpoint of this device.
    for http_method, endpoint in DEVICE_FIXTURES["Air Purifier 400s"]:
        register = getattr(aioclient_mock, http_method)
        register(
            f"https://smartapi.vesync.com{endpoint}",
            json=load_json_object_fixture("air-purifier-detail-updated.json", DOMAIN),
        )
def mock_device_response(
aioclient_mock: AiohttpClientMocker, device_name: str, override: Any
) -> None:

View File

@@ -1,97 +0,0 @@
"""Tests for the coordinator."""
from datetime import timedelta
from freezegun.api import FrozenDateTimeFactory
from homeassistant.components.vesync.const import DOMAIN, UPDATE_INTERVAL
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from .common import (
mock_air_purifier_400s_update_response,
mock_device_response,
mock_multiple_device_responses,
mock_outlet_energy_response,
)
from tests.common import MockConfigEntry, async_fire_time_changed
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_entity_update(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    aioclient_mock: AiohttpClientMocker,
) -> None:
    """Test Vesync coordinator data update.

    Sets up an Air Purifier 400s and an Outlet, then advances time so the
    coordinator refreshes and the entities pick up updated device data.
    """
    config_entry = MockConfigEntry(
        data={CONF_PASSWORD: "username", CONF_USERNAME: "password"},
        domain=DOMAIN,
        unique_id="vesync_unique_id_1",
        entry_id="1",
    )

    mock_multiple_device_responses(aioclient_mock, ["Air Purifier 400s", "Outlet"])
    mock_outlet_energy_response(aioclient_mock, "Outlet")

    expected_entity_ids = [
        # From "Air Purifier 400s"
        "fan.air_purifier_400s",
        "sensor.air_purifier_400s_filter_lifetime",
        "sensor.air_purifier_400s_air_quality",
        "sensor.air_purifier_400s_pm2_5",
        # From Outlet
        "switch.outlet",
        "sensor.outlet_current_power",
        "sensor.outlet_energy_use_today",
        "sensor.outlet_energy_use_weekly",
        "sensor.outlet_energy_use_monthly",
        "sensor.outlet_energy_use_yearly",
        "sensor.outlet_current_voltage",
    ]

    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()
    assert config_entry.state is ConfigEntryState.LOADED

    for entity_id in expected_entity_ids:
        assert hass.states.get(entity_id).state != STATE_UNAVAILABLE

    def assert_states(expected: dict[str, str]) -> None:
        """Assert that each entity is in the given state."""
        for entity_id, value in expected.items():
            assert hass.states.get(entity_id).state == value

    assert_states(
        {
            "sensor.air_purifier_400s_pm2_5": "5",
            "sensor.air_purifier_400s_air_quality": "excellent",
            "sensor.outlet_current_voltage": "120.0",
            "sensor.outlet_energy_use_weekly": "0.0",
        }
    )

    # Swap in updated mock responses for both devices.
    aioclient_mock.clear_requests()
    mock_air_purifier_400s_update_response(aioclient_mock)
    mock_device_response(aioclient_mock, "Outlet", {"voltage": 129})
    mock_outlet_energy_response(aioclient_mock, "Outlet", {"totalEnergy": 2.2})

    freezer.tick(timedelta(seconds=UPDATE_INTERVAL))
    async_fire_time_changed(hass)
    await hass.async_block_till_done(True)

    # Device data refreshed; energy history is unchanged because it only
    # updates once every 6 hours.
    assert_states(
        {
            "sensor.air_purifier_400s_pm2_5": "15",
            "sensor.air_purifier_400s_air_quality": "good",
            "sensor.outlet_current_voltage": "129.0",
            "sensor.outlet_energy_use_weekly": "0.0",
        }
    )

    freezer.tick(timedelta(hours=6))
    async_fire_time_changed(hass)
    await hass.async_block_till_done(True)

    assert_states(
        {
            "sensor.air_purifier_400s_pm2_5": "15",
            "sensor.air_purifier_400s_air_quality": "good",
            "sensor.outlet_current_voltage": "129.0",
            "sensor.outlet_energy_use_weekly": "2.2",
        }
    )