mirror of https://github.com/home-assistant/core.git
synced 2026-01-10 09:37:16 +01:00

Compare commits

67 commits:

607b09ccdd, 76b65c5779, 632d44c7b7, 1b36a34ae4, 0afb849e7f, 5f97937ba0, ca48148150, 9142fa1aa6,
615a346a39, d6e1bc3e75, 4e3414fc8a, 2553b0d1e0, 2d8bb8e6d2, e365f807c1, 87504806b1, e742711a76,
667a87988d, 4337dd6864, fc286900d3, 908e044db1, bb60286ed9, 10799952af, f00f3d6b0c, d4dc7f806c,
c254b71559, d90a3b6c42, da3ee0aa61, eb17b68ad3, 2243855209, 82df4a3a4d, bc2ac65b1e, 9bc04d7b5c,
b620c53662, ab35ceab5a, 0727c7b9e8, 3331b81b64, 8259a5a71f, 3100e852ce, 1b0ccf10e5, f38ff3b622,
3ad9052b5c, e0595ce518, 54bf83855c, 2713469651, 64bdf2d35b, c3ac8869b0, ecb37d0bdf, b2083a7bee,
2ff255dedc, 995f5db913, 9efbf2f880, 34fdf5a36f, f70a2ba1f7, eb718bffe0, 70f14600d1, 05289216c4,
c1814201be, dceb0d9bf7, df768cab7d, 9b8d1b88c5, 885cf20afa, 663c994dfb, 7eba08f385, da8ce07216,
442499b452, 49db0a3720, ebac7b7aad
@@ -77,7 +77,7 @@ homeassistant/components/counter/* @fabaff
homeassistant/components/cover/* @home-assistant/core
homeassistant/components/cpuspeed/* @fabaff
homeassistant/components/cups/* @fabaff
homeassistant/components/daikin/* @fredrike @rofrantz
homeassistant/components/daikin/* @fredrike
homeassistant/components/darksky/* @fabaff
homeassistant/components/deconz/* @kane610
homeassistant/components/delijn/* @bollewolle

@@ -1,11 +1,15 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}

ENV \
    S6_SERVICES_GRACETIME=60000

WORKDIR /usr/src

## Setup Home Assistant
COPY . homeassistant/
RUN pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
RUN \
    pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
    -r homeassistant/requirements_all.txt -c homeassistant/homeassistant/package_constraints.txt \
    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
    -e ./homeassistant \

@@ -347,12 +347,17 @@ class AmbientStation:

    async def _attempt_connect(self):
        """Attempt to connect to the socket (retrying later on fail)."""
        try:

        async def connect(timestamp=None):
            """Connect."""
            await self.client.websocket.connect()

        try:
            await connect()
        except WebsocketError as err:
            _LOGGER.error("Error with the websocket connection: %s", err)
            self._ws_reconnect_delay = min(2 * self._ws_reconnect_delay, 480)
            async_call_later(self._hass, self._ws_reconnect_delay, self.ws_connect)
            async_call_later(self._hass, self._ws_reconnect_delay, connect)

    async def ws_connect(self):
        """Register handlers and connect to the websocket."""

@@ -298,8 +298,6 @@ class BayesianBinarySensor(BinarySensorDevice):

    @property
    def device_state_attributes(self):
        """Return the state attributes of the sensor."""
        print(self.current_observations)
        print(self.observations_by_entity)
        return {
            ATTR_OBSERVATIONS: list(self.current_observations.values()),
            ATTR_OCCURRED_OBSERVATION_ENTITIES: list(

@@ -482,7 +482,7 @@ async def google_assistant_list(hass, connection, msg):
            {
                "entity_id": entity.entity_id,
                "traits": [trait.name for trait in entity.traits()],
                "might_2fa": entity.might_2fa(),
                "might_2fa": entity.might_2fa_traits(),
            }
        )

@@ -17,6 +17,7 @@ from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import Throttle

from . import config_flow  # noqa: F401
from .const import TIMEOUT

_LOGGER = logging.getLogger(__name__)

@@ -91,7 +92,7 @@ async def daikin_api_setup(hass, host):

    session = hass.helpers.aiohttp_client.async_get_clientsession()
    try:
        with timeout(10):
        with timeout(TIMEOUT):
            device = Appliance(host, session)
            await device.init()
    except asyncio.TimeoutError:

@@ -84,7 +84,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up Daikin climate based on config_entry."""
    daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id)
    async_add_entities([DaikinClimate(daikin_api)])
    async_add_entities([DaikinClimate(daikin_api)], update_before_add=True)


class DaikinClimate(ClimateDevice):

@@ -10,7 +10,7 @@ import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST

from .const import KEY_IP, KEY_MAC
from .const import KEY_IP, KEY_MAC, TIMEOUT

_LOGGER = logging.getLogger(__name__)

@@ -38,7 +38,7 @@ class FlowHandler(config_entries.ConfigFlow):
            device = Appliance(
                host, self.hass.helpers.aiohttp_client.async_get_clientsession()
            )
            with timeout(10):
            with timeout(TIMEOUT):
                await device.init()
        except asyncio.TimeoutError:
            return self.async_abort(reason="device_timeout")

@@ -25,3 +25,5 @@ SENSOR_TYPES = {

KEY_MAC = "mac"
KEY_IP = "ip"

TIMEOUT = 60

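The hunks above replace the hard-coded `timeout(10)` calls with a shared `TIMEOUT` constant. A minimal sketch of how that constant is used with `async_timeout` (names other than `TIMEOUT` are illustrative, not taken from this changeset):

```python
import asyncio
from async_timeout import timeout

TIMEOUT = 60  # seconds, mirrors the daikin const change above


async def init_device(device):
    """Initialise a device, giving up after TIMEOUT seconds."""
    try:
        with timeout(TIMEOUT):
            await device.init()
    except asyncio.TimeoutError:
        return None  # caller aborts or retries later
    return device
```
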
@@ -3,8 +3,7 @@
  "name": "Daikin AC",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/daikin",
  "requirements": ["pydaikin==1.6.2"],
  "dependencies": [],
  "codeowners": ["@fredrike", "@rofrantz"],
  "requirements": ["pydaikin==1.6.3"],
  "codeowners": ["@fredrike"],
  "quality_scale": "platinum"
}

@@ -1,17 +1,11 @@
"""Support for Daikin AC sensors."""
import logging

from homeassistant.const import CONF_ICON, CONF_NAME, CONF_TYPE
from homeassistant.const import CONF_ICON, CONF_NAME, TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
from homeassistant.util.unit_system import UnitSystem

from . import DOMAIN as DAIKIN_DOMAIN
from .const import (
    ATTR_INSIDE_TEMPERATURE,
    ATTR_OUTSIDE_TEMPERATURE,
    SENSOR_TYPE_TEMPERATURE,
    SENSOR_TYPES,
)
from .const import ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, SENSOR_TYPES

_LOGGER = logging.getLogger(__name__)

@@ -31,30 +25,19 @@ async def async_setup_entry(hass, entry, async_add_entities):
    sensors = [ATTR_INSIDE_TEMPERATURE]
    if daikin_api.device.support_outside_temperature:
        sensors.append(ATTR_OUTSIDE_TEMPERATURE)
    async_add_entities(
        [
            DaikinClimateSensor(daikin_api, sensor, hass.config.units)
            for sensor in sensors
        ]
    )
    async_add_entities([DaikinClimateSensor(daikin_api, sensor) for sensor in sensors])


class DaikinClimateSensor(Entity):
    """Representation of a Sensor."""

    def __init__(self, api, monitored_state, units: UnitSystem, name=None) -> None:
    def __init__(self, api, monitored_state) -> None:
        """Initialize the sensor."""
        self._api = api
        self._sensor = SENSOR_TYPES.get(monitored_state)
        if name is None:
            name = f"{self._sensor[CONF_NAME]} {api.name}"

        self._name = f"{name} {monitored_state.replace('_', ' ')}"
        self._sensor = SENSOR_TYPES[monitored_state]
        self._name = f"{api.name} {self._sensor[CONF_NAME]}"
        self._device_attribute = monitored_state

        if self._sensor[CONF_TYPE] == SENSOR_TYPE_TEMPERATURE:
            self._unit_of_measurement = units.temperature_unit

    @property
    def unique_id(self):
        """Return a unique ID."""
@@ -82,7 +65,7 @@ class DaikinClimateSensor(Entity):
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit_of_measurement
        return TEMP_CELSIUS

    async def async_update(self):
        """Retrieve latest state."""

@@ -68,17 +68,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
        """Handle the initial step."""
        errors = {}
        if user_input is not None:
            try:
                info = await validate_input(self.hass, user_input)
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"

            if "base" not in errors:
            info, errors = await self._async_validate_or_error(user_input)
            if not errors:
                await self.async_set_unique_id(info["mac_addr"])
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title=info["title"], data=user_input)
@@ -119,8 +110,31 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

    async def async_step_import(self, user_input):
        """Handle import."""
        if user_input:
            info, errors = await self._async_validate_or_error(user_input)
            if not errors:
                await self.async_set_unique_id(
                    info["mac_addr"], raise_on_progress=False
                )
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title=info["title"], data=user_input)
        return await self.async_step_user(user_input)

    async def _async_validate_or_error(self, user_input):
        """Validate doorbird or error."""
        errors = {}
        info = {}
        try:
            info = await validate_input(self.hass, user_input)
        except CannotConnect:
            errors["base"] = "cannot_connect"
        except InvalidAuth:
            errors["base"] = "invalid_auth"
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        return info, errors

    @staticmethod
    @callback
    def async_get_options_flow(config_entry):

@@ -39,7 +39,7 @@ from .const import (
    ELK_ELEMENTS,
)

SYNC_TIMEOUT = 55
SYNC_TIMEOUT = 120

_LOGGER = logging.getLogger(__name__)

@@ -215,7 +215,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):

    if not await async_wait_for_elk_to_sync(elk, SYNC_TIMEOUT):
        _LOGGER.error(
            "Timed out after %d seconds while trying to sync with ElkM1", SYNC_TIMEOUT,
            "Timed out after %d seconds while trying to sync with ElkM1 at %s",
            SYNC_TIMEOUT,
            conf[CONF_HOST],
        )
        elk.disconnect()
        raise ConfigEntryNotReady

@@ -64,8 +64,9 @@ async def validate_input(data):
    timed_out = False
    if not await async_wait_for_elk_to_sync(elk, VALIDATE_TIMEOUT):
        _LOGGER.error(
            "Timed out after %d seconds while trying to sync with elkm1",
            "Timed out after %d seconds while trying to sync with ElkM1 at %s",
            VALIDATE_TIMEOUT,
            url,
        )
        timed_out = True

@@ -2,7 +2,7 @@
  "domain": "frontend",
  "name": "Home Assistant Frontend",
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "requirements": ["home-assistant-frontend==20200407.1"],
  "requirements": ["home-assistant-frontend==20200407.2"],
  "dependencies": [
    "api",
    "auth",

@@ -372,14 +372,19 @@ class GoogleEntity:
    @callback
    def might_2fa(self) -> bool:
        """Return if the entity might encounter 2FA."""
        if not self.config.should_2fa(self.state):
            return False

        return self.might_2fa_traits()

    @callback
    def might_2fa_traits(self) -> bool:
        """Return if the entity might encounter 2FA based on just traits."""
        state = self.state
        domain = state.domain
        features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)

        if not self.config.should_2fa(state):
            return False

        return any(
            trait.might_2fa(domain, features, device_class) for trait in self.traits()
        )

@@ -424,6 +424,9 @@ class HERETravelTimeData:
        if departure is not None:
            departure = convert_time_to_isodate(departure)

        if departure is None and arrival is None:
            departure = "now"

        _LOGGER.debug(
            "Requesting route for origin: %s, destination: %s, route_mode: %s, mode: %s, traffic_mode: %s, arrival: %s, departure: %s",
            origin,

@@ -6,6 +6,7 @@ from zlib import adler32
import voluptuous as vol

from homeassistant.components import cover
from homeassistant.components.cover import DEVICE_CLASS_GARAGE, DEVICE_CLASS_GATE
from homeassistant.components.media_player import DEVICE_CLASS_TV
from homeassistant.const import (
    ATTR_DEVICE_CLASS,
@@ -200,7 +201,7 @@ def get_accessory(hass, driver, state, aid, config):
    device_class = state.attributes.get(ATTR_DEVICE_CLASS)
    features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)

    if device_class == "garage" and features & (
    if device_class in (DEVICE_CLASS_GARAGE, DEVICE_CLASS_GATE) and features & (
        cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE
    ):
        a_type = "GarageDoorOpener"

@@ -2,6 +2,6 @@
  "domain": "homekit",
  "name": "HomeKit",
  "documentation": "https://www.home-assistant.io/integrations/homekit",
  "requirements": ["HAP-python==2.8.1"],
  "requirements": ["HAP-python==2.8.2"],
  "codeowners": []
}

@@ -4,5 +4,5 @@
  "documentation": "https://www.home-assistant.io/integrations/intesishome",
  "dependencies": [],
  "codeowners": ["@jnimmo"],
  "requirements": ["pyintesishome==1.7.1"]
  "requirements": ["pyintesishome==1.7.3"]
}

@@ -6,8 +6,10 @@ from pyipp import (
    IPP,
    IPPConnectionError,
    IPPConnectionUpgradeRequired,
    IPPError,
    IPPParseError,
    IPPResponseError,
    IPPVersionNotSupportedError,
)
import voluptuous as vol

@@ -70,10 +72,16 @@ class IPPFlowHandler(ConfigFlow, domain=DOMAIN):
        except IPPConnectionUpgradeRequired:
            return self._show_setup_form({"base": "connection_upgrade"})
        except (IPPConnectionError, IPPResponseError):
            _LOGGER.debug("IPP Connection/Response Error", exc_info=True)
            return self._show_setup_form({"base": "connection_error"})
        except IPPParseError:
            _LOGGER.exception("IPP Parse Error")
            _LOGGER.debug("IPP Parse Error", exc_info=True)
            return self.async_abort(reason="parse_error")
        except IPPVersionNotSupportedError:
            return self.async_abort(reason="ipp_version_error")
        except IPPError:
            _LOGGER.debug("IPP Error", exc_info=True)
            return self.async_abort(reason="ipp_error")

        user_input[CONF_UUID] = info[CONF_UUID]

@@ -111,12 +119,19 @@ class IPPFlowHandler(ConfigFlow, domain=DOMAIN):
        except IPPConnectionUpgradeRequired:
            return self.async_abort(reason="connection_upgrade")
        except (IPPConnectionError, IPPResponseError):
            _LOGGER.debug("IPP Connection/Response Error", exc_info=True)
            return self.async_abort(reason="connection_error")
        except IPPParseError:
            _LOGGER.exception("IPP Parse Error")
            _LOGGER.debug("IPP Parse Error", exc_info=True)
            return self.async_abort(reason="parse_error")
        except IPPVersionNotSupportedError:
            return self.async_abort(reason="ipp_version_error")
        except IPPError:
            _LOGGER.debug("IPP Error", exc_info=True)
            return self.async_abort(reason="ipp_error")

        self.discovery_info[CONF_UUID] = info[CONF_UUID]
        if info[CONF_UUID] is not None:
            self.discovery_info[CONF_UUID] = info[CONF_UUID]

        await self.async_set_unique_id(self.discovery_info[CONF_UUID])
        self._abort_if_unique_id_configured(

@@ -2,7 +2,7 @@
  "domain": "ipp",
  "name": "Internet Printing Protocol (IPP)",
  "documentation": "https://www.home-assistant.io/integrations/ipp",
  "requirements": ["pyipp==0.9.0"],
  "requirements": ["pyipp==0.10.1"],
  "codeowners": ["@ctalkington"],
  "config_flow": true,
  "quality_scale": "platinum",

@@ -116,7 +116,12 @@ class IPPMarkerSensor(IPPSensor):
    @property
    def state(self) -> Union[None, str, int, float]:
        """Return the state of the sensor."""
        return self.coordinator.data.markers[self.marker_index].level
        level = self.coordinator.data.markers[self.marker_index].level

        if level >= 0:
            return level

        return None


class IPPPrinterSensor(IPPSensor):

@@ -27,6 +27,8 @@
      "already_configured": "This printer is already configured.",
      "connection_error": "Failed to connect to printer.",
      "connection_upgrade": "Failed to connect to printer due to connection upgrade being required.",
      "ipp_error": "Encountered IPP error.",
      "ipp_version_error": "IPP version not supported by printer.",
      "parse_error": "Failed to parse response from printer."
    }
  }

@@ -67,7 +67,7 @@ SERVICE_LOW_HZ = "set_low_hz"
SERVICE_SUB_DB = "set_sub_db"
SERVICE_UPDATE_DSP = "update_dsp"

DSP_SCAN_INTERVAL = 3600
DSP_SCAN_INTERVAL = timedelta(seconds=3600)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {

@@ -283,11 +283,6 @@ class KonnectedFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
            # build config info and wait for user confirmation
            self.data[CONF_HOST] = user_input[CONF_HOST]
            self.data[CONF_PORT] = user_input[CONF_PORT]
            self.data[CONF_ACCESS_TOKEN] = self.hass.data.get(DOMAIN, {}).get(
                CONF_ACCESS_TOKEN
            ) or "".join(
                random.choices(f"{string.ascii_uppercase}{string.digits}", k=20)
            )

            # brief delay to allow processing of recent status req
            await asyncio.sleep(0.1)
@@ -343,8 +338,12 @@ class KonnectedFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
            },
        )

        # Attach default options and create entry
        # Create access token, attach default options and create entry
        self.data[CONF_DEFAULT_OPTIONS] = self.options
        self.data[CONF_ACCESS_TOKEN] = self.hass.data.get(DOMAIN, {}).get(
            CONF_ACCESS_TOKEN
        ) or "".join(random.choices(f"{string.ascii_uppercase}{string.digits}", k=20))

        return self.async_create_entry(
            title=KONN_PANEL_MODEL_NAMES[self.data[CONF_MODEL]], data=self.data,
        )

@@ -15,7 +15,7 @@ _LOGGER = logging.getLogger(__name__)

def to_lutron_level(level):
    """Convert the given Home Assistant light level (0-255) to Lutron (0-100)."""
    return int((level * 100) // 255)
    return int(round((level * 100) / 255))


def to_hass_level(level):

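A quick check of the conversion change above: flooring and rounding only differ for intermediate brightness values, which is what the new `round()` call fixes.

```python
def to_lutron_level_old(level):
    """Old behaviour: truncate toward zero."""
    return int((level * 100) // 255)


def to_lutron_level_new(level):
    """New behaviour: round to the nearest Lutron step."""
    return int(round((level * 100) / 255))


assert to_lutron_level_old(130) == 50   # 50.98 truncated
assert to_lutron_level_new(130) == 51   # 50.98 rounded up
assert to_lutron_level_old(255) == to_lutron_level_new(255) == 100
```
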
@@ -2,7 +2,6 @@
  "domain": "lutron_caseta",
  "name": "Lutron Caseta",
  "documentation": "https://www.home-assistant.io/integrations/lutron_caseta",
  "requirements": ["pylutron-caseta==0.6.0"],
  "dependencies": [],
  "requirements": ["pylutron-caseta==0.6.1"],
  "codeowners": ["@swails"]
}

@@ -2,6 +2,7 @@
import asyncio
import logging

from async_timeout import timeout
from pymodbus.client.asynchronous import schedulers
from pymodbus.client.asynchronous.serial import AsyncModbusSerialClient as ClientSerial
from pymodbus.client.asynchronous.tcp import AsyncModbusTCPClient as ClientTCP
@@ -20,7 +21,6 @@ from homeassistant.const import (
    CONF_PORT,
    CONF_TIMEOUT,
    CONF_TYPE,
    EVENT_HOMEASSISTANT_START,
    EVENT_HOMEASSISTANT_STOP,
)
import homeassistant.helpers.config_validation as cv
@@ -35,7 +35,7 @@ from .const import (
    CONF_PARITY,
    CONF_STOPBITS,
    DEFAULT_HUB,
    MODBUS_DOMAIN,
    MODBUS_DOMAIN as DOMAIN,
    SERVICE_WRITE_COIL,
    SERVICE_WRITE_REGISTER,
)
@@ -68,7 +68,7 @@ ETHERNET_SCHEMA = BASE_SCHEMA.extend(
)

CONFIG_SCHEMA = vol.Schema(
    {MODBUS_DOMAIN: vol.All(cv.ensure_list, [vol.Any(SERIAL_SCHEMA, ETHERNET_SCHEMA)])},
    {DOMAIN: vol.All(cv.ensure_list, [vol.Any(SERIAL_SCHEMA, ETHERNET_SCHEMA)])},
    extra=vol.ALLOW_EXTRA,
)

@@ -95,10 +95,9 @@ SERVICE_WRITE_COIL_SCHEMA = vol.Schema(

async def async_setup(hass, config):
    """Set up Modbus component."""
    hass.data[MODBUS_DOMAIN] = hub_collect = {}
    hass.data[DOMAIN] = hub_collect = {}

    _LOGGER.debug("registering hubs")
    for client_config in config[MODBUS_DOMAIN]:
    for client_config in config[DOMAIN]:
        hub_collect[client_config[CONF_NAME]] = ModbusHub(client_config, hass.loop)

    def stop_modbus(event):
@@ -106,28 +105,13 @@ async def async_setup(hass, config):
        for client in hub_collect.values():
            del client

    def start_modbus(event):
    def start_modbus():
        """Start Modbus service."""
        for client in hub_collect.values():
            _LOGGER.debug("setup hub %s", client.name)
            client.setup()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_modbus)

    # Register services for modbus
    hass.services.async_register(
        MODBUS_DOMAIN,
        SERVICE_WRITE_REGISTER,
        write_register,
        schema=SERVICE_WRITE_REGISTER_SCHEMA,
    )
    hass.services.async_register(
        MODBUS_DOMAIN,
        SERVICE_WRITE_COIL,
        write_coil,
        schema=SERVICE_WRITE_COIL_SCHEMA,
    )

    async def write_register(service):
        """Write Modbus registers."""
        unit = int(float(service.data[ATTR_UNIT]))
@@ -151,8 +135,19 @@ async def async_setup(hass, config):
        client_name = service.data[ATTR_HUB]
        await hub_collect[client_name].write_coil(unit, address, state)

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_modbus)
    # do not wait for EVENT_HOMEASSISTANT_START, activate pymodbus now
    await hass.async_add_executor_job(start_modbus)

    # Register services for modbus
    hass.services.async_register(
        DOMAIN,
        SERVICE_WRITE_REGISTER,
        write_register,
        schema=SERVICE_WRITE_REGISTER_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN, SERVICE_WRITE_COIL, write_coil, schema=SERVICE_WRITE_COIL_SCHEMA,
    )
    return True


@@ -161,7 +156,6 @@ class ModbusHub:

    def __init__(self, client_config, main_loop):
        """Initialize the Modbus hub."""
        _LOGGER.debug("Preparing setup: %s", client_config)

        # generic configuration
        self._loop = main_loop
@@ -171,7 +165,7 @@ class ModbusHub:
        self._config_type = client_config[CONF_TYPE]
        self._config_port = client_config[CONF_PORT]
        self._config_timeout = client_config[CONF_TIMEOUT]
        self._config_delay = client_config[CONF_DELAY]
        self._config_delay = 0

        if self._config_type == "serial":
            # serial configuration
@@ -183,6 +177,7 @@ class ModbusHub:
        else:
            # network configuration
            self._config_host = client_config[CONF_HOST]
            self._config_delay = client_config[CONF_DELAY]

    @property
    def name(self):
@@ -200,7 +195,6 @@ class ModbusHub:
        # Client* do deliver loop, client as result but
        # pylint does not accept that fact

        _LOGGER.debug("doing setup")
        if self._config_type == "serial":
            _, self._client = ClientSerial(
                schedulers.ASYNC_IO,
@@ -210,7 +204,6 @@ class ModbusHub:
                stopbits=self._config_stopbits,
                bytesize=self._config_bytesize,
                parity=self._config_parity,
                timeout=self._config_timeout,
                loop=self._loop,
            )
        elif self._config_type == "rtuovertcp":
@@ -246,7 +239,12 @@ class ModbusHub:
        await self._connect_delay()
        async with self._lock:
            kwargs = {"unit": unit} if unit else {}
            result = await func(address, count, **kwargs)
            try:
                async with timeout(self._config_timeout):
                    result = await func(address, count, **kwargs)
            except asyncio.TimeoutError:
                result = None

            if isinstance(result, (ModbusException, ExceptionResponse)):
                _LOGGER.error("Hub %s Exception (%s)", self._config_name, result)
            return result
@@ -256,7 +254,11 @@ class ModbusHub:
        await self._connect_delay()
        async with self._lock:
            kwargs = {"unit": unit} if unit else {}
            await func(address, value, **kwargs)
            try:
                async with timeout(self._config_timeout):
                    func(address, value, **kwargs)
            except asyncio.TimeoutError:
                return

    async def read_coils(self, unit, address, count):
        """Read coils."""

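The Modbus hunks above wrap each hub read/write in `async_timeout`. A minimal sketch of the pattern, assuming `func`, `address`, `count` and the timeout value stand in for the hub attributes used in the real module:

```python
import asyncio

from async_timeout import timeout


async def read_with_timeout(func, address, count, timeout_s):
    """Call a pymodbus coroutine but never wait longer than timeout_s."""
    try:
        async with timeout(timeout_s):
            return await func(address, count)
    except asyncio.TimeoutError:
        return None  # caller treats None as "no result this cycle"
```
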
@@ -54,7 +54,7 @@ PLATFORM_SCHEMA = vol.All(
)


async def async_setup_platform(hass, config, add_entities, discovery_info=None):
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Modbus binary sensors."""
    sensors = []
    for entry in config[CONF_INPUTS]:
@@ -70,7 +70,7 @@ async def async_setup_platform(hass, config, add_entities, discovery_info=None):
            )
        )

    add_entities(sensors)
    async_add_entities(sensors)


class ModbusBinarySensor(BinarySensorDevice):

@@ -72,7 +72,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
)


async def async_setup_platform(hass, config, add_entities, discovery_info=None):
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Modbus Thermostat Platform."""
    name = config[CONF_NAME]
    modbus_slave = config[CONF_SLAVE]
@@ -91,7 +91,7 @@ async def async_setup_platform(hass, config, add_entities, discovery_info=None):
    hub_name = config[CONF_HUB]
    hub = hass.data[MODBUS_DOMAIN][hub_name]

    add_entities(
    async_add_entities(
        [
            ModbusThermostat(
                hub,

@@ -89,7 +89,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
)


async def async_setup_platform(hass, config, add_entities, discovery_info=None):
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Modbus sensors."""
    sensors = []
    data_types = {DATA_TYPE_INT: {1: "h", 2: "i", 4: "q"}}
@@ -148,7 +148,7 @@ async def async_setup_platform(hass, config, add_entities, discovery_info=None):

    if not sensors:
        return False
    add_entities(sensors)
    async_add_entities(sensors)


class ModbusRegisterSensor(RestoreEntity):

@@ -76,7 +76,7 @@ PLATFORM_SCHEMA = vol.All(
)


async def async_setup_platform(hass, config, add_entities, discovery_info=None):
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Read configuration and create Modbus devices."""
    switches = []
    if CONF_COILS in config:
@@ -109,7 +109,7 @@ async def async_setup_platform(hass, config, add_entities, discovery_info=None):
            )
        )

    add_entities(switches)
    async_add_entities(switches)


class ModbusCoilSwitch(ToggleEntity, RestoreEntity):

@@ -1,6 +1,8 @@
"""Support for interfacing with Monoprice 6 zone home audio controller."""
import logging

from serial import SerialException

from homeassistant import core
from homeassistant.components.media_player import MediaPlayerDevice
from homeassistant.components.media_player.const import (
@@ -18,6 +20,8 @@ from .const import CONF_SOURCES, DOMAIN, SERVICE_RESTORE, SERVICE_SNAPSHOT

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 1

SUPPORT_MONOPRICE = (
    SUPPORT_VOLUME_MUTE
    | SUPPORT_VOLUME_SET
@@ -127,9 +131,15 @@ class MonopriceZone(MediaPlayerDevice):

    def update(self):
        """Retrieve latest state."""
        state = self._monoprice.zone_status(self._zone_id)
        try:
            state = self._monoprice.zone_status(self._zone_id)
        except SerialException:
            _LOGGER.warning("Could not update zone %d", self._zone_id)
            return

        if not state:
            return False
            return

        self._state = STATE_ON if state.power else STATE_OFF
        self._volume = state.volume
        self._mute = state.mute
@@ -138,7 +148,6 @@ class MonopriceZone(MediaPlayerDevice):
            self._source = self._source_id_name[idx]
        else:
            self._source = None
        return True

    @property
    def entity_registry_enabled_default(self):

@@ -2,7 +2,6 @@
import logging

from nexia.const import (
    FAN_MODES,
    OPERATION_MODE_AUTO,
    OPERATION_MODE_COOL,
    OPERATION_MODE_HEAT,
@@ -192,7 +191,7 @@ class NexiaZone(NexiaThermostatZoneEntity, ClimateDevice):
    @property
    def fan_modes(self):
        """Return the list of available fan modes."""
        return FAN_MODES
        return self._thermostat.get_fan_modes()

    @property
    def min_temp(self):

@@ -1,7 +1,7 @@
{
  "domain": "nexia",
  "name": "Nexia",
  "requirements": ["nexia==0.8.0"],
  "requirements": ["nexia==0.9.1"],
  "codeowners": ["@ryannazaretian", "@bdraco"],
  "documentation": "https://www.home-assistant.io/integrations/nexia",
  "config_flow": true

@@ -63,7 +63,7 @@ SENSORS = (
    "nextcloud_storage_num_files",
    "nextcloud_storage_num_storages",
    "nextcloud_storage_num_storages_local",
    "nextcloud_storage_num_storage_home",
    "nextcloud_storage_num_storages_home",
    "nextcloud_storage_num_storages_other",
    "nextcloud_shares_num_shares",
    "nextcloud_shares_num_shares_user",
@@ -83,9 +83,9 @@ SENSORS = (
    "nextcloud_database_type",
    "nextcloud_database_version",
    "nextcloud_activeusers_last5minutes",
    "nextcloud_activeusers_last1hour",
    "nextcloud_activeusers_last24hours",
    "nextcloud_activeUsers_last5minutes",
    "nextcloud_activeUsers_last1hour",
    "nextcloud_activeUsers_last24hours",
)

@@ -109,7 +109,7 @@ def _firmware_from_status(status):
def _serial_from_status(status):
    """Find the best serialvalue from the status."""
    serial = status.get("device.serial") or status.get("ups.serial")
    if serial and serial == "unknown":
    if serial and (serial.lower() == "unknown" or serial.count("0") == len(serial)):
        return None
    return serial

@@ -32,7 +32,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
            "If you do not want to have your printer on <br />"
            " at all times, and you would like to monitor <br /> "
            "temperatures, please add <br />"
            "bed and/or number_of_tools to your configuration <br />"
            "bed and/or number_of_tools to your configuration <br />"
            "and restart.",
            title=NOTIFICATION_TITLE,
            notification_id=NOTIFICATION_ID,

@@ -411,8 +411,11 @@ class ONVIFHassCamera(Camera):
            req = media_service.create_type("GetSnapshotUri")
            req.ProfileToken = profiles[self._profile_index].token

            snapshot_uri = await media_service.GetSnapshotUri(req)
            self._snapshot = snapshot_uri.Uri
            try:
                snapshot_uri = await media_service.GetSnapshotUri(req)
                self._snapshot = snapshot_uri.Uri
            except ServerDisconnectedError as err:
                _LOGGER.debug("Camera does not support GetSnapshotUri: %s", err)

            _LOGGER.debug(
                "ONVIF Camera Using the following URL for %s snapshot: %s",
@@ -516,7 +519,8 @@ class ONVIFHassCamera(Camera):
            """Read image from a URL."""
            try:
                response = requests.get(self._snapshot, timeout=5, auth=auth)
                return response.content
                if response.status_code < 300:
                    return response.content
            except requests.exceptions.RequestException as error:
                _LOGGER.error(
                    "Fetch snapshot image failed from %s, falling back to FFmpeg; %s",
@@ -524,6 +528,8 @@ class ONVIFHassCamera(Camera):
                    error,
                )

            return None

        image = await self.hass.async_add_job(fetch)

        if image is None:

@@ -1,5 +1,4 @@
"""Shared class to maintain Plex server instances."""
from functools import partial, wraps
import logging
import ssl
from urllib.parse import urlparse
@@ -13,8 +12,8 @@ import requests.exceptions
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.const import CONF_TOKEN, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later

from .const import (
    CONF_CLIENT_IDENTIFIER,
@@ -43,31 +42,6 @@ plexapi.X_PLEX_PRODUCT = X_PLEX_PRODUCT
plexapi.X_PLEX_VERSION = X_PLEX_VERSION


def debounce(func):
    """Decorate function to debounce callbacks from Plex websocket."""

    unsub = None

    async def call_later_listener(self, _):
        """Handle call_later callback."""
        nonlocal unsub
        unsub = None
        await func(self)

    @wraps(func)
    async def wrapper(self):
        """Schedule async callback."""
        nonlocal unsub
        if unsub:
            _LOGGER.debug("Throttling update of %s", self.friendly_name)
            unsub()  # pylint: disable=not-callable
        unsub = async_call_later(
            self.hass, DEBOUNCE_TIMEOUT, partial(call_later_listener, self),
        )

    return wrapper


class PlexServer:
    """Manages a single Plex server connection."""

@@ -87,6 +61,13 @@ class PlexServer:
        self._accounts = []
        self._owner_username = None
        self._version = None
        self.async_update_platforms = Debouncer(
            hass,
            _LOGGER,
            cooldown=DEBOUNCE_TIMEOUT,
            immediate=True,
            function=self._async_update_platforms,
        ).async_call

        # Header conditionally added as it is not available in config entry v1
        if CONF_CLIENT_IDENTIFIER in server_config:
@@ -192,8 +173,7 @@ class PlexServer:
        """Fetch all data from the Plex server in a single method."""
        return (self._plex_server.clients(), self._plex_server.sessions())

    @debounce
    async def async_update_platforms(self):
    async def _async_update_platforms(self):
        """Update the platform entities."""
        _LOGGER.debug("Updating devices")

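The Plex hunk above swaps a hand-rolled `@debounce` decorator for the shared `Debouncer` helper. A minimal sketch of that pattern, with a placeholder cooldown value and function names that are illustrative rather than the integration's:

```python
from homeassistant.helpers.debounce import Debouncer

DEBOUNCE_COOLDOWN = 10  # placeholder; the integration uses its own constant


async def _async_update_platforms():
    """Do the actual (possibly expensive) update."""


def make_debounced_update(hass, logger):
    """Return a callable that collapses bursts of calls into one update."""
    return Debouncer(
        hass,
        logger,
        cooldown=DEBOUNCE_COOLDOWN,
        immediate=True,  # first call runs right away, later calls are coalesced
        function=_async_update_platforms,
    ).async_call
```
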
@@ -129,7 +129,7 @@ class PowerWallGridStatusSensor(PowerWallEntity, BinarySensorDevice):

    @property
    def is_on(self):
        """Get the current value in kWh."""
        """Grid is online."""
        return (
            self._coordinator.data[POWERWALL_API_GRID_STATUS] == POWERWALL_GRID_ONLINE
        )

@@ -2,12 +2,10 @@

DOMAIN = "powerwall"

POWERWALL_SITE_NAME = "site_name"

POWERWALL_OBJECT = "powerwall"
POWERWALL_COORDINATOR = "coordinator"

UPDATE_INTERVAL = 60
UPDATE_INTERVAL = 30

ATTR_REGION = "region"
ATTR_GRID_CODE = "grid_code"
@@ -46,3 +44,5 @@ POWERWALL_RUNNING_KEY = "running"

MODEL = "PowerWall 2"
MANUFACTURER = "Tesla"

ENERGY_KILO_WATT = "kW"

@@ -4,7 +4,6 @@ import logging
from homeassistant.const import (
    DEVICE_CLASS_BATTERY,
    DEVICE_CLASS_POWER,
    ENERGY_KILO_WATT_HOUR,
    UNIT_PERCENTAGE,
)

@@ -14,6 +13,7 @@ from .const import (
    ATTR_FREQUENCY,
    ATTR_INSTANT_AVERAGE_VOLTAGE,
    DOMAIN,
    ENERGY_KILO_WATT,
    POWERWALL_API_CHARGE,
    POWERWALL_API_DEVICE_TYPE,
    POWERWALL_API_METERS,
@@ -87,7 +87,7 @@ class PowerWallEnergySensor(PowerWallEntity):
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return ENERGY_KILO_WATT_HOUR
        return ENERGY_KILO_WATT

    @property
    def name(self):
@@ -106,7 +106,7 @@ class PowerWallEnergySensor(PowerWallEntity):

    @property
    def state(self):
        """Get the current value in kWh."""
        """Get the current value in kW."""
        meter = self._coordinator.data[POWERWALL_API_METERS][self._meter]
        return round(float(meter.instant_power / 1000), 3)

@@ -1,4 +1,5 @@
"""Support for Recollect Waste curbside collection pickup."""
from datetime import timedelta
import logging

import recollect_waste
@@ -16,7 +17,7 @@ CONF_PLACE_ID = "place_id"
CONF_SERVICE_ID = "service_id"
DEFAULT_NAME = "recollect_waste"
ICON = "mdi:trash-can-outline"
SCAN_INTERVAL = 86400
SCAN_INTERVAL = timedelta(days=1)


PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(

@@ -17,6 +17,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import (
    ACTIVE_UPDATE_RATE,
@@ -27,6 +28,7 @@ from .const import (
    SENSE_DEVICES_DATA,
    SENSE_DISCOVERED_DEVICES_DATA,
    SENSE_TIMEOUT_EXCEPTIONS,
    SENSE_TRENDS_COORDINATOR,
)

_LOGGER = logging.getLogger(__name__)
@@ -111,9 +113,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    except SENSE_TIMEOUT_EXCEPTIONS:
        raise ConfigEntryNotReady

    trends_coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name=f"Sense Trends {email}",
        update_method=gateway.update_trend_data,
        update_interval=timedelta(seconds=300),
    )

    # This can take longer than 60s and we already know
    # sense is online since get_discovered_device_data was
    # successful so we do it later.
    hass.loop.create_task(trends_coordinator.async_request_refresh())

    hass.data[DOMAIN][entry.entry_id] = {
        SENSE_DATA: gateway,
        SENSE_DEVICES_DATA: sense_devices_data,
        SENSE_TRENDS_COORDINATOR: trends_coordinator,
        SENSE_DISCOVERED_DEVICES_DATA: sense_discovered_devices,
    }

@@ -122,7 +138,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
        hass.config_entries.async_forward_entry_setup(entry, component)
    )

    async def async_sense_update(now):
    async def async_sense_update(_):
        """Retrieve latest state."""
        try:
            await gateway.update_realtime()

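The Sense hunks above move the slow trend fetch behind a `DataUpdateCoordinator`. A minimal sketch of the wiring, assuming `gateway.update_trend_data` is the coroutine that does the fetch; the helper name is illustrative:

```python
from datetime import timedelta
import logging

from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


def build_trends_coordinator(hass, gateway):
    """One coordinator polls the API; entities subscribe instead of fetching."""
    return DataUpdateCoordinator(
        hass,
        _LOGGER,
        name="Sense Trends",
        update_method=gateway.update_trend_data,   # coroutine doing the fetch
        update_interval=timedelta(seconds=300),
    )

# An entity then stops polling (should_poll -> False) and registers for pushes:
#   self.async_on_remove(coordinator.async_add_listener(self._handle_update))
```
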
@@ -71,7 +71,6 @@ class SenseDevice(BinarySensorDevice):
        self._unique_id = f"{sense_monitor_id}-{self._id}"
        self._icon = sense_to_mdi(device["icon"])
        self._sense_devices_data = sense_devices_data
        self._undo_dispatch_subscription = None
        self._state = None
        self._available = False

@@ -117,17 +116,14 @@ class SenseDevice(BinarySensorDevice):

    async def async_added_to_hass(self):
        """Register callbacks."""
        self._undo_dispatch_subscription = async_dispatcher_connect(
            self.hass,
            f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}",
            self._async_update_from_data,
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}",
                self._async_update_from_data,
            )
        )

    async def async_will_remove_from_hass(self):
        """Undo subscription."""
        if self._undo_dispatch_subscription:
            self._undo_dispatch_subscription()

    @callback
    def _async_update_from_data(self):
        """Get the latest data, update state. Must not do I/O."""

@@ -12,6 +12,7 @@ SENSE_DATA = "sense_data"
SENSE_DEVICE_UPDATE = "sense_devices_update"
SENSE_DEVICES_DATA = "sense_devices_data"
SENSE_DISCOVERED_DEVICES_DATA = "sense_discovered_devices"
SENSE_TRENDS_COORDINATOR = "sense_trends_coorindator"

ACTIVE_NAME = "Energy"
ACTIVE_TYPE = "active"

@@ -1,12 +1,10 @@
"""Support for monitoring a Sense energy sensor."""
from datetime import timedelta
import logging

from homeassistant.const import DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, POWER_WATT
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle

from .const import (
    ACTIVE_NAME,
@@ -22,12 +20,9 @@ from .const import (
    SENSE_DEVICE_UPDATE,
    SENSE_DEVICES_DATA,
    SENSE_DISCOVERED_DEVICES_DATA,
    SENSE_TIMEOUT_EXCEPTIONS,
    SENSE_TRENDS_COORDINATOR,
)

MIN_TIME_BETWEEN_DAILY_UPDATES = timedelta(seconds=300)


_LOGGER = logging.getLogger(__name__)


@@ -64,17 +59,18 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Sense sensor."""
    data = hass.data[DOMAIN][config_entry.entry_id][SENSE_DATA]
    sense_devices_data = hass.data[DOMAIN][config_entry.entry_id][SENSE_DEVICES_DATA]
    trends_coordinator = hass.data[DOMAIN][config_entry.entry_id][
        SENSE_TRENDS_COORDINATOR
    ]

    @Throttle(MIN_TIME_BETWEEN_DAILY_UPDATES)
    async def update_trends():
        """Update the daily power usage."""
        await data.update_trend_data()
    # Request only in case it takes longer
    # than 60s
    await trends_coordinator.async_request_refresh()

    sense_monitor_id = data.sense_monitor_id
    sense_devices = hass.data[DOMAIN][config_entry.entry_id][
        SENSE_DISCOVERED_DEVICES_DATA
    ]
    await data.update_trend_data()

    devices = [
        SenseEnergyDevice(sense_devices_data, device, sense_monitor_id)
@@ -108,8 +104,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                name,
                sensor_type,
                is_production,
                update_trends,
                var,
                trends_coordinator,
                unique_id,
            )
        )
@@ -140,7 +135,6 @@ class SenseActiveSensor(Entity):
        self._sensor_type = sensor_type
        self._is_production = is_production
        self._state = None
        self._undo_dispatch_subscription = None

    @property
    def name(self):
@@ -179,17 +173,14 @@ class SenseActiveSensor(Entity):

    async def async_added_to_hass(self):
        """Register callbacks."""
        self._undo_dispatch_subscription = async_dispatcher_connect(
            self.hass,
            f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}",
            self._async_update_from_data,
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}",
                self._async_update_from_data,
            )
        )

    async def async_will_remove_from_hass(self):
        """Undo subscription."""
        if self._undo_dispatch_subscription:
            self._undo_dispatch_subscription()

    @callback
    def _async_update_from_data(self):
        """Update the sensor from the data. Must not do I/O."""
@@ -206,7 +197,7 @@ class SenseTrendsSensor(Entity):
    """Implementation of a Sense energy sensor."""

    def __init__(
        self, data, name, sensor_type, is_production, update_call, sensor_id, unique_id
        self, data, name, sensor_type, is_production, trends_coordinator, unique_id,
    ):
        """Initialize the Sense sensor."""
        name_type = PRODUCTION_NAME if is_production else CONSUMPTION_NAME
@@ -215,10 +206,11 @@ class SenseTrendsSensor(Entity):
        self._available = False
        self._data = data
        self._sensor_type = sensor_type
        self.update_sensor = update_call
        self._coordinator = trends_coordinator
        self._is_production = is_production
        self._state = None
        self._unit_of_measurement = ENERGY_KILO_WATT_HOUR
        self._had_any_update = False

    @property
    def name(self):
@@ -228,12 +220,12 @@ class SenseTrendsSensor(Entity):
    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state
        return round(self._data.get_trend(self._sensor_type, self._is_production), 1)

    @property
    def available(self):
        """Return the availability of the sensor."""
        return self._available
        """Return if entity is available."""
        return self._had_any_update and self._coordinator.last_update_success

    @property
    def unit_of_measurement(self):
@@ -250,18 +242,27 @@ class SenseTrendsSensor(Entity):
        """Return the unique id."""
        return self._unique_id

    @property
    def should_poll(self):
        """No need to poll. Coordinator notifies entity of updates."""
        return False

    @callback
    def _async_update(self):
        """Track if we had an update so we do not report zero data."""
        self._had_any_update = True
        self.async_write_ha_state()

    async def async_update(self):
        """Get the latest data, update state."""
        """Update the entity.

        try:
            await self.update_sensor()
        except SENSE_TIMEOUT_EXCEPTIONS:
            _LOGGER.error("Timeout retrieving data")
            return
        Only used by the generic entity update service.
        """
        await self._coordinator.async_request_refresh()

        state = self._data.get_trend(self._sensor_type, self._is_production)
        self._state = round(state, 1)
        self._available = True
    async def async_added_to_hass(self):
        """When entity is added to hass."""
        self.async_on_remove(self._coordinator.async_add_listener(self._async_update))


class SenseEnergyDevice(Entity):
@@ -276,7 +277,6 @@ class SenseEnergyDevice(Entity):
        self._unique_id = f"{sense_monitor_id}-{self._id}-{CONSUMPTION_ID}"
        self._icon = sense_to_mdi(device["icon"])
        self._sense_devices_data = sense_devices_data
        self._undo_dispatch_subscription = None
        self._state = None

    @property
@@ -321,17 +321,14 @@ class SenseEnergyDevice(Entity):

    async def async_added_to_hass(self):
        """Register callbacks."""
        self._undo_dispatch_subscription = async_dispatcher_connect(
            self.hass,
            f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}",
            self._async_update_from_data,
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}",
                self._async_update_from_data,
            )
        )

    async def async_will_remove_from_hass(self):
        """Undo subscription."""
        if self._undo_dispatch_subscription:
            self._undo_dispatch_subscription()

    @callback
    def _async_update_from_data(self):
        """Get the latest data, update state. Must not do I/O."""

@@ -74,11 +74,7 @@ class SlackNotificationService(BaseNotificationService):
        self._default_channel = default_channel
        self._hass = hass
        self._icon = icon

        if username or self._icon:
            self._as_user = False
        else:
            self._as_user = True
        self._username = username

    async def _async_send_local_file_message(self, path, targets, message, title):
        """Upload a local file (with message) to Slack."""
@@ -108,11 +104,11 @@ class SlackNotificationService(BaseNotificationService):
            target: self._client.chat_postMessage(
                channel=target,
                text=message,
                as_user=self._as_user,
                attachments=attachments,
                blocks=blocks,
                icon_emoji=self._icon,
                link_names=True,
                username=self._username,
            )
            for target in targets
        }

@@ -3,6 +3,5 @@
  "name": "Switcher",
  "documentation": "https://www.home-assistant.io/integrations/switcher_kis/",
  "codeowners": ["@tomerfi"],
  "requirements": ["aioswitcher==2019.4.26"],
  "dependencies": []
  "requirements": ["aioswitcher==1.1.0"]
}

@@ -15,18 +15,22 @@ from homeassistant.components.light import (
    SUPPORT_COLOR_TEMP,
    Light,
)
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util.color import (
    color_temperature_kelvin_to_mired as kelvin_to_mired,
    color_temperature_mired_to_kelvin as mired_to_kelvin,
)
import homeassistant.util.dt as dt_util

from . import CONF_LIGHT, DOMAIN as TPLINK_DOMAIN
from .common import async_add_entities_retry

PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(seconds=5)
CURRENT_POWER_UPDATE_INTERVAL = timedelta(seconds=60)
HISTORICAL_POWER_UPDATE_INTERVAL = timedelta(minutes=60)

_LOGGER = logging.getLogger(__name__)

@@ -34,6 +38,22 @@ ATTR_CURRENT_POWER_W = "current_power_w"
ATTR_DAILY_ENERGY_KWH = "daily_energy_kwh"
ATTR_MONTHLY_ENERGY_KWH = "monthly_energy_kwh"

LIGHT_STATE_DFT_ON = "dft_on_state"
LIGHT_STATE_ON_OFF = "on_off"
LIGHT_STATE_RELAY_STATE = "relay_state"
LIGHT_STATE_BRIGHTNESS = "brightness"
LIGHT_STATE_COLOR_TEMP = "color_temp"
LIGHT_STATE_HUE = "hue"
LIGHT_STATE_SATURATION = "saturation"
LIGHT_STATE_ERROR_MSG = "err_msg"

LIGHT_SYSINFO_MAC = "mac"
LIGHT_SYSINFO_ALIAS = "alias"
LIGHT_SYSINFO_MODEL = "model"
LIGHT_SYSINFO_IS_DIMMABLE = "is_dimmable"
LIGHT_SYSINFO_IS_VARIABLE_COLOR_TEMP = "is_variable_color_temp"
LIGHT_SYSINFO_IS_COLOR = "is_color"


async def async_setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the platform.
@@ -82,7 +102,21 @@ class LightState(NamedTuple):
    brightness: int
    color_temp: float
    hs: Tuple[int, int]
    emeter_params: dict

    def to_param(self):
        """Return a version that we can send to the bulb."""
        if self.color_temp:
            color_temp = mired_to_kelvin(self.color_temp)
        else:
            color_temp = None

        return {
            LIGHT_STATE_ON_OFF: 1 if self.state else 0,
            LIGHT_STATE_BRIGHTNESS: brightness_to_percentage(self.brightness),
            LIGHT_STATE_COLOR_TEMP: color_temp,
            LIGHT_STATE_HUE: self.hs[0] if self.hs else 0,
            LIGHT_STATE_SATURATION: self.hs[1] if self.hs else 0,
        }


class LightFeatures(NamedTuple):
@@ -95,6 +129,7 @@ class LightFeatures(NamedTuple):
    supported_features: int
    min_mireds: float
    max_mireds: float
    has_emeter: bool


class TPLinkSmartBulb(Light):
@@ -107,6 +142,9 @@ class TPLinkSmartBulb(Light):
        self._light_state = cast(LightState, None)
        self._is_available = True
        self._is_setting_light_state = False
        self._last_current_power_update = None
        self._last_historical_power_update = None
        self._emeter_params = {}

    @property
    def unique_id(self):
@@ -137,45 +175,42 @@ class TPLinkSmartBulb(Light):
    @property
    def device_state_attributes(self):
        """Return the state attributes of the device."""
        return self._light_state.emeter_params
        return self._emeter_params

    async def async_turn_on(self, **kwargs):
        """Turn the light on."""
        brightness = (
            int(kwargs[ATTR_BRIGHTNESS])
            if ATTR_BRIGHTNESS in kwargs
            else self._light_state.brightness
            if self._light_state.brightness is not None
            else 255
        )
        color_tmp = (
            int(kwargs[ATTR_COLOR_TEMP])
            if ATTR_COLOR_TEMP in kwargs
            else self._light_state.color_temp
        )
        if ATTR_BRIGHTNESS in kwargs:
            brightness = int(kwargs[ATTR_BRIGHTNESS])
        elif self._light_state.brightness is not None:
            brightness = self._light_state.brightness
        else:
            brightness = 255

        await self.async_set_light_state_retry(
        if ATTR_COLOR_TEMP in kwargs:
            color_tmp = int(kwargs[ATTR_COLOR_TEMP])
        else:
            color_tmp = self._light_state.color_temp

        if ATTR_HS_COLOR in kwargs:
            # TP-Link requires integers.
            hue_sat = tuple(int(val) for val in kwargs[ATTR_HS_COLOR])

            # TP-Link cannot have both color temp and hue_sat
            color_tmp = 0
        else:
            hue_sat = self._light_state.hs

        await self._async_set_light_state_retry(
            self._light_state,
            LightState(
                state=True,
                brightness=brightness,
                color_temp=color_tmp,
                hs=tuple(kwargs.get(ATTR_HS_COLOR, self._light_state.hs or ())),
                emeter_params=self._light_state.emeter_params,
            self._light_state._replace(
                state=True, brightness=brightness, color_temp=color_tmp, hs=hue_sat,
            ),
        )

    async def async_turn_off(self, **kwargs):
        """Turn the light off."""
        await self.async_set_light_state_retry(
            self._light_state,
            LightState(
                state=False,
                brightness=self._light_state.brightness,
                color_temp=self._light_state.color_temp,
                hs=self._light_state.hs,
                emeter_params=self._light_state.emeter_params,
            ),
        await self._async_set_light_state_retry(
            self._light_state, self._light_state._replace(state=False),
        )

    @property

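The new `turn_on`/`turn_off` code above builds the target state from the previous one with `NamedTuple._replace()`. A small self-contained illustration of that idiom (field names chosen to mirror the diff, not taken verbatim from it):

```python
from typing import NamedTuple, Tuple


class LightState(NamedTuple):
    state: bool
    brightness: int
    color_temp: float
    hs: Tuple[int, int]


old = LightState(state=True, brightness=128, color_temp=300, hs=(30, 40))
off = old._replace(state=False)        # every other field is preserved
dimmed = old._replace(brightness=64)

assert off.brightness == 128 and dimmed.hs == (30, 40)
```
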
@@ -214,21 +249,11 @@ class TPLinkSmartBulb(Light):
|
||||
if self._is_setting_light_state:
|
||||
return
|
||||
|
||||
# Initial run, perform call blocking.
|
||||
if not self._light_features:
|
||||
self.do_update_retry(False)
|
||||
# Subsequent runs should not block.
|
||||
else:
|
||||
self.hass.add_job(self.do_update_retry, True)
|
||||
|
||||
def do_update_retry(self, update_state: bool) -> None:
|
||||
"""Update state data with retry.""" ""
|
||||
try:
|
||||
# Update light features only once.
|
||||
self._light_features = (
|
||||
self._light_features or self.get_light_features_retry()
|
||||
)
|
||||
self._light_state = self.get_light_state_retry(self._light_features)
|
||||
if not self._light_features:
|
||||
self._light_features = self._get_light_features_retry()
|
||||
self._light_state = self._get_light_state_retry()
|
||||
self._is_available = True
|
||||
except (SmartDeviceException, OSError) as ex:
|
||||
if self._is_available:
|
||||
@@ -237,45 +262,43 @@ class TPLinkSmartBulb(Light):
|
||||
)
|
||||
self._is_available = False
|
||||
|
||||
# The local variables were updates asyncronousally,
|
||||
# we need the entity registry to poll this object's properties for
|
||||
# updated information. Calling schedule_update_ha_state will only
|
||||
# cause a loop.
|
||||
if update_state:
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
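do_update_retry above delegates to _get_light_features_retry and _get_light_state_retry (shown further down in this hunk), which follow a simple call-twice pattern: try once, swallow the transport error, then retry a single time and let a second failure propagate to the caller. A generic hedged sketch of that pattern, using illustrative exception types rather than the integration's own:

import logging

_LOGGER = logging.getLogger(__name__)

def call_with_single_retry(func, *args):
    """Call func once; on a transient error, log and try exactly one more time."""
    try:
        return func(*args)
    except (ConnectionError, OSError):
        pass

    _LOGGER.debug("Retrying %s after a transient failure", func.__name__)
    # A second failure is intentionally not caught here; the caller decides
    # how to mark the entity unavailable.
    return func(*args)
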
@property
|
||||
def supported_features(self):
|
||||
"""Flag supported features."""
|
||||
return self._light_features.supported_features
|
||||
|
||||
def get_light_features_retry(self) -> LightFeatures:
|
||||
def _get_light_features_retry(self) -> LightFeatures:
|
||||
"""Retry the retrieval of the supported features."""
|
||||
try:
|
||||
return self.get_light_features()
|
||||
return self._get_light_features()
|
||||
except (SmartDeviceException, OSError):
|
||||
pass
|
||||
|
||||
_LOGGER.debug("Retrying getting light features")
|
||||
return self.get_light_features()
|
||||
return self._get_light_features()
|
||||
|
||||
def get_light_features(self):
|
||||
def _get_light_features(self):
|
||||
"""Determine all supported features in one go."""
|
||||
sysinfo = self.smartbulb.sys_info
|
||||
supported_features = 0
|
||||
# Calling api here as it reformats
|
||||
mac = self.smartbulb.mac
|
||||
alias = self.smartbulb.alias
|
||||
model = self.smartbulb.model
|
||||
alias = sysinfo[LIGHT_SYSINFO_ALIAS]
|
||||
model = sysinfo[LIGHT_SYSINFO_MODEL]
|
||||
min_mireds = None
|
||||
max_mireds = None
|
||||
has_emeter = self.smartbulb.has_emeter
|
||||
|
||||
if self.smartbulb.is_dimmable:
|
||||
if sysinfo.get(LIGHT_SYSINFO_IS_DIMMABLE) or LIGHT_STATE_BRIGHTNESS in sysinfo:
|
||||
supported_features += SUPPORT_BRIGHTNESS
|
||||
if getattr(self.smartbulb, "is_variable_color_temp", False):
|
||||
if sysinfo.get(LIGHT_SYSINFO_IS_VARIABLE_COLOR_TEMP):
|
||||
supported_features += SUPPORT_COLOR_TEMP
|
||||
min_mireds = kelvin_to_mired(self.smartbulb.valid_temperature_range[1])
|
||||
max_mireds = kelvin_to_mired(self.smartbulb.valid_temperature_range[0])
|
||||
if getattr(self.smartbulb, "is_color", False):
|
||||
# Have to make another api request here in
|
||||
# order to not re-implement pyHS100 here
|
||||
max_range, min_range = self.smartbulb.valid_temperature_range
|
||||
min_mireds = kelvin_to_mired(min_range)
|
||||
max_mireds = kelvin_to_mired(max_range)
|
||||
if sysinfo.get(LIGHT_SYSINFO_IS_COLOR):
|
||||
supported_features += SUPPORT_COLOR
|
||||
|
||||
return LightFeatures(
|
||||
@@ -286,112 +309,186 @@ class TPLinkSmartBulb(Light):
|
||||
supported_features=supported_features,
|
||||
min_mireds=min_mireds,
|
||||
max_mireds=max_mireds,
|
||||
has_emeter=has_emeter,
|
||||
)
|
||||
|
||||
def get_light_state_retry(self, light_features: LightFeatures) -> LightState:
|
||||
def _get_light_state_retry(self) -> LightState:
|
||||
"""Retry the retrieval of getting light states."""
|
||||
try:
|
||||
return self.get_light_state(light_features)
|
||||
return self._get_light_state()
|
||||
except (SmartDeviceException, OSError):
|
||||
pass
|
||||
|
||||
_LOGGER.debug("Retrying getting light state")
|
||||
return self.get_light_state(light_features)
|
||||
return self._get_light_state()
|
||||
|
||||
def get_light_state(self, light_features: LightFeatures) -> LightState:
|
||||
"""Get the light state."""
|
||||
emeter_params = {}
|
||||
def _light_state_from_params(self, light_state_params) -> LightState:
|
||||
brightness = None
|
||||
color_temp = None
|
||||
hue_saturation = None
|
||||
state = self.smartbulb.state == SmartBulb.BULB_STATE_ON
|
||||
light_features = self._light_features
|
||||
|
||||
state = bool(light_state_params[LIGHT_STATE_ON_OFF])
|
||||
|
||||
if not state and LIGHT_STATE_DFT_ON in light_state_params:
|
||||
light_state_params = light_state_params[LIGHT_STATE_DFT_ON]
|
||||
|
||||
if light_features.supported_features & SUPPORT_BRIGHTNESS:
|
||||
brightness = brightness_from_percentage(self.smartbulb.brightness)
|
||||
brightness = brightness_from_percentage(
|
||||
light_state_params[LIGHT_STATE_BRIGHTNESS]
|
||||
)
|
||||
|
||||
if light_features.supported_features & SUPPORT_COLOR_TEMP:
|
||||
if self.smartbulb.color_temp is not None and self.smartbulb.color_temp != 0:
|
||||
color_temp = kelvin_to_mired(self.smartbulb.color_temp)
|
||||
if (
|
||||
light_state_params.get(LIGHT_STATE_COLOR_TEMP) is not None
|
||||
and light_state_params[LIGHT_STATE_COLOR_TEMP] != 0
|
||||
):
|
||||
color_temp = kelvin_to_mired(light_state_params[LIGHT_STATE_COLOR_TEMP])
|
||||
|
||||
if light_features.supported_features & SUPPORT_COLOR:
|
||||
hue, sat, _ = self.smartbulb.hsv
|
||||
hue_saturation = (hue, sat)
|
||||
|
||||
if self.smartbulb.has_emeter:
|
||||
emeter_params[ATTR_CURRENT_POWER_W] = "{:.1f}".format(
|
||||
self.smartbulb.current_consumption()
|
||||
hue_saturation = (
|
||||
light_state_params[LIGHT_STATE_HUE],
|
||||
light_state_params[LIGHT_STATE_SATURATION],
|
||||
)
|
||||
daily_statistics = self.smartbulb.get_emeter_daily()
|
||||
monthly_statistics = self.smartbulb.get_emeter_monthly()
|
||||
try:
|
||||
emeter_params[ATTR_DAILY_ENERGY_KWH] = "{:.3f}".format(
|
||||
daily_statistics[int(time.strftime("%d"))]
|
||||
)
|
||||
emeter_params[ATTR_MONTHLY_ENERGY_KWH] = "{:.3f}".format(
|
||||
monthly_statistics[int(time.strftime("%m"))]
|
||||
)
|
||||
except KeyError:
|
||||
# device returned no daily/monthly history
|
||||
pass
|
||||
|
||||
return LightState(
|
||||
state=state,
|
||||
brightness=brightness,
|
||||
color_temp=color_temp,
|
||||
hs=hue_saturation,
|
||||
emeter_params=emeter_params,
|
||||
)
|
||||
|
||||
    async def async_set_light_state_retry(
    def _get_light_state(self) -> LightState:
        """Get the light state."""
        self._update_emeter()
        return self._light_state_from_params(self._get_device_state())

    def _update_emeter(self):
        if not self._light_features.has_emeter:
            return

        now = dt_util.utcnow()
        if (
            not self._last_current_power_update
            or self._last_current_power_update + CURRENT_POWER_UPDATE_INTERVAL < now
        ):
            self._last_current_power_update = now
            self._emeter_params[ATTR_CURRENT_POWER_W] = "{:.1f}".format(
                self.smartbulb.current_consumption()
            )

        if (
            not self._last_historical_power_update
            or self._last_historical_power_update + HISTORICAL_POWER_UPDATE_INTERVAL
            < now
        ):
            self._last_historical_power_update = now
            daily_statistics = self.smartbulb.get_emeter_daily()
            monthly_statistics = self.smartbulb.get_emeter_monthly()
            try:
                self._emeter_params[ATTR_DAILY_ENERGY_KWH] = "{:.3f}".format(
                    daily_statistics[int(time.strftime("%d"))]
                )
                self._emeter_params[ATTR_MONTHLY_ENERGY_KWH] = "{:.3f}".format(
                    monthly_statistics[int(time.strftime("%m"))]
                )
            except KeyError:
                # device returned no daily/monthly history
                pass

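_update_emeter above gates the expensive energy-meter reads behind two timestamps so each kind of read runs at most once per interval. A compact sketch of that gating outside the class, assuming a placeholder 30-second interval rather than the component's actual constants:

from datetime import datetime, timedelta, timezone

CURRENT_POWER_UPDATE_INTERVAL = timedelta(seconds=30)  # placeholder value

class PowerPoller:
    """Fetch an expensive reading at most once per interval."""

    def __init__(self, read_power):
        self._read_power = read_power
        self._last_update = None
        self.current_power_w = None

    def maybe_update(self):
        now = datetime.now(timezone.utc)
        if (
            self._last_update is None
            or self._last_update + CURRENT_POWER_UPDATE_INTERVAL < now
        ):
            self._last_update = now
            self.current_power_w = f"{self._read_power():.1f}"

poller = PowerPoller(lambda: 10.5)
poller.maybe_update()  # performs the read
poller.maybe_update()  # skipped until the interval elapses
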
async def _async_set_light_state_retry(
|
||||
self, old_light_state: LightState, new_light_state: LightState
|
||||
) -> None:
|
||||
"""Set the light state with retry."""
|
||||
# Optimistically setting the light state.
|
||||
self._light_state = new_light_state
|
||||
|
||||
# Tell the device to set the states.
|
||||
if not _light_state_diff(old_light_state, new_light_state):
|
||||
# Nothing to do, avoid the executor
|
||||
return
|
||||
|
||||
self._is_setting_light_state = True
|
||||
try:
|
||||
await self.hass.async_add_executor_job(
|
||||
self.set_light_state, old_light_state, new_light_state
|
||||
light_state_params = await self.hass.async_add_executor_job(
|
||||
self._set_light_state, old_light_state, new_light_state
|
||||
)
|
||||
self._is_available = True
|
||||
self._is_setting_light_state = False
|
||||
if LIGHT_STATE_ERROR_MSG in light_state_params:
|
||||
raise HomeAssistantError(light_state_params[LIGHT_STATE_ERROR_MSG])
|
||||
self._light_state = self._light_state_from_params(light_state_params)
|
||||
return
|
||||
except (SmartDeviceException, OSError):
|
||||
pass
|
||||
|
||||
try:
|
||||
_LOGGER.debug("Retrying setting light state")
|
||||
await self.hass.async_add_executor_job(
|
||||
self.set_light_state, old_light_state, new_light_state
|
||||
light_state_params = await self.hass.async_add_executor_job(
|
||||
self._set_light_state, old_light_state, new_light_state
|
||||
)
|
||||
self._is_available = True
|
||||
if LIGHT_STATE_ERROR_MSG in light_state_params:
|
||||
raise HomeAssistantError(light_state_params[LIGHT_STATE_ERROR_MSG])
|
||||
self._light_state = self._light_state_from_params(light_state_params)
|
||||
except (SmartDeviceException, OSError) as ex:
|
||||
self._is_available = False
|
||||
_LOGGER.warning("Could not set data for %s: %s", self.smartbulb.host, ex)
|
||||
|
||||
self._is_setting_light_state = False
|
||||
|
||||
def set_light_state(
|
||||
def _set_light_state(
|
||||
self, old_light_state: LightState, new_light_state: LightState
|
||||
) -> None:
|
||||
"""Set the light state."""
|
||||
# Calling the API with the new state information.
|
||||
if new_light_state.state != old_light_state.state:
|
||||
if new_light_state.state:
|
||||
self.smartbulb.state = SmartBulb.BULB_STATE_ON
|
||||
diff = _light_state_diff(old_light_state, new_light_state)
|
||||
|
||||
if not diff:
|
||||
return
|
||||
|
||||
return self._set_device_state(diff)
|
||||
|
||||
def _get_device_state(self):
|
||||
"""State of the bulb or smart dimmer switch."""
|
||||
if isinstance(self.smartbulb, SmartBulb):
|
||||
return self.smartbulb.get_light_state()
|
||||
|
||||
sysinfo = self.smartbulb.sys_info
|
||||
# It's not really a bulb, it's a dimmable SmartPlug (aka Wall Switch)
|
||||
return {
|
||||
LIGHT_STATE_ON_OFF: sysinfo[LIGHT_STATE_RELAY_STATE],
|
||||
LIGHT_STATE_BRIGHTNESS: sysinfo.get(LIGHT_STATE_BRIGHTNESS, 0),
|
||||
LIGHT_STATE_COLOR_TEMP: 0,
|
||||
LIGHT_STATE_HUE: 0,
|
||||
LIGHT_STATE_SATURATION: 0,
|
||||
}
|
||||
|
||||
def _set_device_state(self, state):
|
||||
"""Set state of the bulb or smart dimmer switch."""
|
||||
if isinstance(self.smartbulb, SmartBulb):
|
||||
return self.smartbulb.set_light_state(state)
|
||||
|
||||
# It's not really a bulb, it's a dimmable SmartPlug (aka Wall Switch)
|
||||
if LIGHT_STATE_BRIGHTNESS in state:
|
||||
# Brightness of 0 is accepted by the
|
||||
# device but the underlying library rejects it
|
||||
# so we turn off instead.
|
||||
if state[LIGHT_STATE_BRIGHTNESS]:
|
||||
self.smartbulb.brightness = state[LIGHT_STATE_BRIGHTNESS]
|
||||
else:
|
||||
self.smartbulb.state = SmartBulb.BULB_STATE_OFF
|
||||
return
|
||||
self.smartbulb.state = self.smartbulb.SWITCH_STATE_OFF
|
||||
elif LIGHT_STATE_ON_OFF in state:
|
||||
if state[LIGHT_STATE_ON_OFF]:
|
||||
self.smartbulb.state = self.smartbulb.SWITCH_STATE_ON
|
||||
else:
|
||||
self.smartbulb.state = self.smartbulb.SWITCH_STATE_OFF
|
||||
|
||||
if new_light_state.color_temp != old_light_state.color_temp:
|
||||
self.smartbulb.color_temp = mired_to_kelvin(new_light_state.color_temp)
|
||||
return self._get_device_state()
|
||||
|
||||
brightness_pct = brightness_to_percentage(new_light_state.brightness)
|
||||
if new_light_state.hs != old_light_state.hs and len(new_light_state.hs) > 1:
|
||||
hue, sat = new_light_state.hs
|
||||
hsv = (int(hue), int(sat), brightness_pct)
|
||||
self.smartbulb.hsv = hsv
|
||||
elif new_light_state.brightness != old_light_state.brightness:
|
||||
self.smartbulb.brightness = brightness_pct
|
||||
|
||||
def _light_state_diff(old_light_state: LightState, new_light_state: LightState):
    old_state_param = old_light_state.to_param()
    new_state_param = new_light_state.to_param()

    return {
        key: value
        for key, value in new_state_param.items()
        if new_state_param.get(key) != old_state_param.get(key)
    }

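_light_state_diff feeds _set_device_state with only the keys that actually changed, so an unchanged state skips the executor entirely. For illustration, the same key-wise diff on plain dicts, using the on_off/brightness/hue/saturation keys that appear in _get_device_state above (example values are made up):

def state_diff(old_param: dict, new_param: dict) -> dict:
    """Return only the keys whose values changed between two param dicts."""
    return {
        key: value
        for key, value in new_param.items()
        if old_param.get(key) != value
    }

old = {"on_off": 1, "brightness": 80, "hue": 30, "saturation": 40}
new = {"on_off": 1, "brightness": 55, "hue": 30, "saturation": 40}

assert state_diff(old, new) == {"brightness": 55}  # one write instead of four
assert state_diff(old, old) == {}                  # nothing to send, skip the executor
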
@@ -2,7 +2,7 @@
import logging

from homeassistant.components.unifi.config_flow import get_controller_from_config_entry
from homeassistant.const import DATA_BYTES
from homeassistant.const import DATA_MEGABYTES
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect

@@ -116,7 +116,7 @@ class UniFiRxBandwidthSensor(UniFiClient):
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity."""
        return DATA_BYTES
        return DATA_MEGABYTES


class UniFiTxBandwidthSensor(UniFiRxBandwidthSensor):

@@ -125,8 +125,8 @@ class VizioOptionsConfigFlow(config_entries.OptionsFlow):
        default_include_or_exclude = (
            CONF_EXCLUDE
            if self.config_entry.options
            and CONF_EXCLUDE in self.config_entry.options.get(CONF_APPS)
            else CONF_EXCLUDE
            and CONF_EXCLUDE in self.config_entry.options.get(CONF_APPS, {})
            else CONF_INCLUDE
        )
        options.update(
            {

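The hunk above fixes two things at once: options.get(CONF_APPS) now falls back to an empty dict so the membership test cannot fail on a missing key, and the else branch returns CONF_INCLUDE where it previously returned CONF_EXCLUDE on both paths. A reduced sketch of the corrected selection (the constant string values here are illustrative):

CONF_APPS = "apps"
CONF_INCLUDE = "include"
CONF_EXCLUDE = "exclude"

def default_include_or_exclude(options: dict) -> str:
    """Pick the default include/exclude choice from previously saved options."""
    return (
        CONF_EXCLUDE
        if options and CONF_EXCLUDE in options.get(CONF_APPS, {})
        else CONF_INCLUDE
    )

assert default_include_or_exclude({}) == CONF_INCLUDE
assert default_include_or_exclude({CONF_APPS: {CONF_EXCLUDE: ["Netflix"]}}) == CONF_EXCLUDE
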
@@ -2,8 +2,7 @@
  "domain": "vizio",
  "name": "VIZIO SmartCast",
  "documentation": "https://www.home-assistant.io/integrations/vizio",
  "requirements": ["pyvizio==0.1.44"],
  "dependencies": [],
  "requirements": ["pyvizio==0.1.46"],
  "codeowners": ["@raman325"],
  "config_flow": true,
  "zeroconf": ["_viziocast._tcp.local."],

@@ -32,6 +32,7 @@ from .core.const import (
    SIGNAL_ADD_ENTITIES,
    RadioType,
)
from .core.discovery import GROUP_PROBE

DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema({vol.Optional(ha_const.CONF_TYPE): cv.string})

@@ -138,6 +139,7 @@ async def async_unload_entry(hass, config_entry):
    """Unload ZHA config entry."""
    await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].shutdown()

    GROUP_PROBE.cleanup()
    api.async_unload_api(hass)

    dispatchers = hass.data[DATA_ZHA].get(DATA_ZHA_DISPATCHERS, [])

@@ -208,6 +208,7 @@ SIGNAL_SET_LEVEL = "set_level"
SIGNAL_STATE_ATTR = "update_state_attribute"
SIGNAL_UPDATE_DEVICE = "{}_zha_update_device"
SIGNAL_REMOVE_GROUP = "remove_group"
SIGNAL_GROUP_ENTITY_REMOVED = "group_entity_removed"
SIGNAL_GROUP_MEMBERSHIP_CHANGE = "group_membership_change"

UNKNOWN = "unknown"

@@ -551,7 +551,15 @@ class ZHADevice(LogMixin):

    async def async_remove_from_group(self, group_id):
        """Remove this device from the provided zigbee group."""
        await self._zigpy_device.remove_from_group(group_id)
        try:
            await self._zigpy_device.remove_from_group(group_id)
        except (zigpy.exceptions.DeliveryError, asyncio.TimeoutError) as ex:
            self.debug(
                "Failed to remove device '%s' from group: 0x%04x ex: %s",
                self._zigpy_device.ieee,
                group_id,
                str(ex),
            )

    async def async_bind_to_group(self, group_id, cluster_bindings):
        """Directly bind this device to a group for the given clusters."""

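A hedged sketch of how a caller could fan this best-effort removal out over several devices, mirroring the asyncio.gather pattern the gateway uses later in this changeset when adding group members; FakeDevice stands in for ZHADevice and is not part of the integration:

import asyncio

class FakeDevice:
    """Stand-in for ZHADevice with only the method this sketch needs."""

    def __init__(self, name):
        self.name = name

    async def async_remove_from_group(self, group_id):
        print(f"{self.name} left group 0x{group_id:04x}")

async def remove_members(devices, group_id):
    """Ask every device to leave the group, tolerating individual failures."""
    await asyncio.gather(
        *(device.async_remove_from_group(group_id) for device in devices),
        return_exceptions=True,  # one unreachable device must not abort the rest
    )

asyncio.run(remove_members([FakeDevice("bulb"), FakeDevice("plug")], 0x0002))
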
@@ -6,7 +6,10 @@ from typing import Callable, List, Tuple

from homeassistant import const as ha_const
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.dispatcher import (
    async_dispatcher_connect,
    async_dispatcher_send,
)
from homeassistant.helpers.entity_registry import async_entries_for_device
from homeassistant.helpers.typing import HomeAssistantType

@@ -166,10 +169,30 @@ class GroupProbe:
    def __init__(self):
        """Initialize instance."""
        self._hass = None
        self._unsubs = []

    def initialize(self, hass: HomeAssistantType) -> None:
        """Initialize the group probe."""
        self._hass = hass
        self._unsubs.append(
            async_dispatcher_connect(
                hass, zha_const.SIGNAL_GROUP_ENTITY_REMOVED, self._reprobe_group
            )
        )

    def cleanup(self):
        """Clean up when ZHA shuts down."""
        for unsub in self._unsubs[:]:
            unsub()
            self._unsubs.remove(unsub)

    def _reprobe_group(self, group_id: int) -> None:
        """Reprobe a group for entities after its members change."""
        zha_gateway = self._hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
        zha_group = zha_gateway.groups.get(group_id)
        if zha_group is None:
            return
        self.discover_group_entities(zha_group)

    @callback
    def discover_group_entities(self, group: zha_typing.ZhaGroupType) -> None:

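GroupProbe's initialize/cleanup pair is standard dispatcher bookkeeping: async_dispatcher_connect returns an unsubscribe callable that is stored and invoked on shutdown. A self-contained sketch of that contract with a toy dispatcher standing in for homeassistant.helpers.dispatcher (all names here are illustrative):

class Dispatcher:
    """Toy stand-in for async_dispatcher_connect/async_dispatcher_send."""

    def __init__(self):
        self._listeners = {}

    def connect(self, signal, target):
        self._listeners.setdefault(signal, []).append(target)
        # The return value is the unsubscribe hook, just like HA's helper.
        return lambda: self._listeners[signal].remove(target)

    def send(self, signal, *args):
        for target in list(self._listeners.get(signal, [])):
            target(*args)

class Probe:
    def __init__(self, dispatcher):
        self._unsubs = []
        self._unsubs.append(dispatcher.connect("group_entity_removed", self.reprobe))

    def reprobe(self, group_id):
        print(f"re-probing group 0x{group_id:04x}")

    def cleanup(self):
        # Unsubscribe everything on shutdown so no callback outlives the probe.
        for unsub in self._unsubs[:]:
            unsub()
            self._unsubs.remove(unsub)

bus = Dispatcher()
probe = Probe(bus)
bus.send("group_entity_removed", 0x0002)  # triggers reprobe
probe.cleanup()
bus.send("group_entity_removed", 0x0002)  # no listener left, nothing happens
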
@@ -20,7 +20,10 @@ from homeassistant.helpers.device_registry import (
|
||||
async_get_registry as get_dev_reg,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.entity_registry import async_get_registry as get_ent_reg
|
||||
from homeassistant.helpers.entity_registry import (
|
||||
async_entries_for_device,
|
||||
async_get_registry as get_ent_reg,
|
||||
)
|
||||
|
||||
from . import discovery, typing as zha_typing
|
||||
from .const import (
|
||||
@@ -77,7 +80,7 @@ from .const import (
|
||||
from .device import DeviceStatus, ZHADevice
|
||||
from .group import ZHAGroup
|
||||
from .patches import apply_application_controller_patch
|
||||
from .registries import RADIO_TYPES
|
||||
from .registries import GROUP_ENTITY_DOMAINS, RADIO_TYPES
|
||||
from .store import async_get_registry
|
||||
from .typing import ZhaDeviceType, ZhaGroupType, ZigpyEndpointType, ZigpyGroupType
|
||||
|
||||
@@ -273,6 +276,9 @@ class ZHAGateway:
|
||||
async_dispatcher_send(
|
||||
self._hass, f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{zigpy_group.group_id:04x}"
|
||||
)
|
||||
if len(zha_group.members) == 2:
|
||||
# we need to do this because there wasn't already a group entity to remove and re-add
|
||||
discovery.GROUP_PROBE.discover_group_entities(zha_group)
|
||||
|
||||
def group_added(self, zigpy_group: ZigpyGroupType) -> None:
|
||||
"""Handle zigpy group added event."""
|
||||
@@ -289,6 +295,7 @@ class ZHAGateway:
|
||||
async_dispatcher_send(
|
||||
self._hass, f"{SIGNAL_REMOVE_GROUP}_0x{zigpy_group.group_id:04x}"
|
||||
)
|
||||
self._cleanup_group_entity_registry_entries(zigpy_group)
|
||||
|
||||
def _send_group_gateway_message(
|
||||
self, zigpy_group: ZigpyGroupType, gateway_message_type: str
|
||||
@@ -368,6 +375,35 @@ class ZHAGateway:
|
||||
e for e in entity_refs if e.reference_id != entity.entity_id
|
||||
]
|
||||
|
||||
def _cleanup_group_entity_registry_entries(
|
||||
self, zigpy_group: ZigpyGroupType
|
||||
) -> None:
|
||||
"""Remove entity registry entries for group entities when the groups are removed from HA."""
|
||||
# first we collect the potential unique ids for entities that could be created from this group
|
||||
possible_entity_unique_ids = [
|
||||
f"{domain}_zha_group_0x{zigpy_group.group_id:04x}"
|
||||
for domain in GROUP_ENTITY_DOMAINS
|
||||
]
|
||||
|
||||
# then we get all group entity entries tied to the coordinator
|
||||
all_group_entity_entries = async_entries_for_device(
|
||||
self.ha_entity_registry, self.coordinator_zha_device.device_id
|
||||
)
|
||||
|
||||
# then we get the entity entries for this specific group by getting the entries that match
|
||||
entries_to_remove = [
|
||||
entry
|
||||
for entry in all_group_entity_entries
|
||||
if entry.unique_id in possible_entity_unique_ids
|
||||
]
|
||||
|
||||
# then we remove the entries from the entity registry
|
||||
for entry in entries_to_remove:
|
||||
_LOGGER.debug(
|
||||
"cleaning up entity registry entry for entity: %s", entry.entity_id
|
||||
)
|
||||
self.ha_entity_registry.async_remove(entry.entity_id)
|
||||
|
||||
@property
|
||||
def devices(self):
|
||||
"""Return devices."""
|
||||
@@ -557,15 +593,7 @@ class ZHAGateway:
|
||||
)
|
||||
tasks.append(self.devices[ieee].async_add_to_group(group_id))
|
||||
await asyncio.gather(*tasks)
|
||||
zha_group = self.groups.get(group_id)
|
||||
_LOGGER.debug(
|
||||
"Probing group: %s:0x%04x for entity discovery",
|
||||
zha_group.name,
|
||||
zha_group.group_id,
|
||||
)
|
||||
discovery.GROUP_PROBE.discover_group_entities(zha_group)
|
||||
|
||||
return zha_group
|
||||
return self.groups.get(group_id)
|
||||
|
||||
async def async_remove_zigpy_group(self, group_id: int) -> None:
|
||||
"""Remove a Zigbee group from Zigpy."""
|
||||
|
||||
@@ -8,7 +8,10 @@ from typing import Any, Awaitable, Dict, List, Optional
|
||||
from homeassistant.core import CALLBACK_TYPE, State, callback
|
||||
from homeassistant.helpers import entity
|
||||
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.event import async_track_state_change
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
@@ -19,6 +22,7 @@ from .core.const import (
|
||||
DATA_ZHA,
|
||||
DATA_ZHA_BRIDGE_ID,
|
||||
DOMAIN,
|
||||
SIGNAL_GROUP_ENTITY_REMOVED,
|
||||
SIGNAL_GROUP_MEMBERSHIP_CHANGE,
|
||||
SIGNAL_REMOVE,
|
||||
SIGNAL_REMOVE_GROUP,
|
||||
@@ -32,7 +36,7 @@ ENTITY_SUFFIX = "entity_suffix"
|
||||
RESTART_GRACE_PERIOD = 7200 # 2 hours
|
||||
|
||||
|
||||
class BaseZhaEntity(RestoreEntity, LogMixin, entity.Entity):
|
||||
class BaseZhaEntity(LogMixin, entity.Entity):
|
||||
"""A base class for ZHA entities."""
|
||||
|
||||
def __init__(self, unique_id: str, zha_device: ZhaDeviceType, **kwargs):
|
||||
@@ -112,7 +116,6 @@ class BaseZhaEntity(RestoreEntity, LogMixin, entity.Entity):
|
||||
@callback
|
||||
def async_set_state(self, attr_id: int, attr_name: str, value: Any) -> None:
|
||||
"""Set the entity state."""
|
||||
pass
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when about to be added to hass."""
|
||||
@@ -133,11 +136,6 @@ class BaseZhaEntity(RestoreEntity, LogMixin, entity.Entity):
|
||||
self.zha_device.gateway.remove_entity_reference(self)
|
||||
self.remove_future.set_result(True)
|
||||
|
||||
@callback
|
||||
def async_restore_last_state(self, last_state) -> None:
|
||||
"""Restore previous state."""
|
||||
pass
|
||||
|
||||
async def async_accept_signal(
|
||||
self, channel: ChannelType, signal: str, func: CALLABLE_T, signal_override=False
|
||||
):
|
||||
@@ -158,7 +156,7 @@ class BaseZhaEntity(RestoreEntity, LogMixin, entity.Entity):
|
||||
_LOGGER.log(level, msg, *args)
|
||||
|
||||
|
||||
class ZhaEntity(BaseZhaEntity):
|
||||
class ZhaEntity(BaseZhaEntity, RestoreEntity):
|
||||
"""A base class for non group ZHA entities."""
|
||||
|
||||
def __init__(
|
||||
@@ -181,6 +179,13 @@ class ZhaEntity(BaseZhaEntity):
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self.remove_future = asyncio.Future()
|
||||
await self.async_accept_signal(
|
||||
None,
|
||||
f"{SIGNAL_REMOVE}_{self.zha_device.ieee}",
|
||||
self.async_remove,
|
||||
signal_override=True,
|
||||
)
|
||||
await self.async_check_recently_seen()
|
||||
await self.async_accept_signal(
|
||||
None,
|
||||
@@ -197,6 +202,16 @@ class ZhaEntity(BaseZhaEntity):
|
||||
self.remove_future,
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Disconnect entity object when removed."""
|
||||
await super().async_will_remove_from_hass()
|
||||
self.zha_device.gateway.remove_entity_reference(self)
|
||||
self.remove_future.set_result(True)
|
||||
|
||||
@callback
|
||||
def async_restore_last_state(self, last_state) -> None:
|
||||
"""Restore previous state."""
|
||||
|
||||
async def async_check_recently_seen(self) -> None:
|
||||
"""Check if the device was seen within the last 2 hours."""
|
||||
last_state = await self.async_get_last_state()
|
||||
@@ -246,13 +261,20 @@ class ZhaGroupEntity(BaseZhaEntity):
|
||||
await self.async_accept_signal(
|
||||
None,
|
||||
f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{self._group_id:04x}",
|
||||
self._update_group_entities,
|
||||
self.async_remove,
|
||||
signal_override=True,
|
||||
)
|
||||
|
||||
self._async_unsub_state_changed = async_track_state_change(
|
||||
self.hass, self._entity_ids, self.async_state_changed_listener
|
||||
)
|
||||
|
||||
def send_removed_signal():
|
||||
async_dispatcher_send(
|
||||
self.hass, SIGNAL_GROUP_ENTITY_REMOVED, self._group_id
|
||||
)
|
||||
|
||||
self.async_on_remove(send_removed_signal)
|
||||
await self.async_update()
|
||||
|
||||
@callback
|
||||
@@ -262,17 +284,6 @@ class ZhaGroupEntity(BaseZhaEntity):
|
||||
"""Handle child updates."""
|
||||
self.async_schedule_update_ha_state(True)
|
||||
|
||||
def _update_group_entities(self):
|
||||
"""Update tracked entities when membership changes."""
|
||||
group = self.zha_device.gateway.get_group(self._group_id)
|
||||
self._entity_ids = group.get_domain_entity_ids(self.platform.domain)
|
||||
if self._async_unsub_state_changed is not None:
|
||||
self._async_unsub_state_changed()
|
||||
|
||||
self._async_unsub_state_changed = async_track_state_change(
|
||||
self.hass, self._entity_ids, self.async_state_changed_listener
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Handle removal from Home Assistant."""
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"zha-quirks==0.0.38",
|
||||
"zigpy-cc==0.3.1",
|
||||
"zigpy-deconz==0.8.0",
|
||||
"zigpy-homeassistant==0.18.1",
|
||||
"zigpy-homeassistant==0.18.2",
|
||||
"zigpy-xbee-homeassistant==0.11.0",
|
||||
"zigpy-zigate==0.5.1"
|
||||
],
|
||||
|
||||
@@ -104,7 +104,7 @@ def byte_to_zwave_brightness(value):
    `value` -- (int) Brightness byte value from 0-255.
    """
    if value > 0:
        return max(1, int((value / 255) * 99))
        return max(1, round((value / 255) * 99))
    return 0

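The one-character change above swaps truncation for rounding, so conversions land on the nearest Z-Wave level instead of always rounding down. A quick worked check (the function body mirrors the new line shown above):

def byte_to_zwave_brightness(value):
    """Convert a 0-255 brightness byte to a 0-99 Z-Wave level (new behaviour)."""
    if value > 0:
        return max(1, round((value / 255) * 99))
    return 0

# Truncation rounded everything down; round() picks the nearest level instead.
assert int((128 / 255) * 99) == 49          # old result
assert byte_to_zwave_brightness(128) == 50  # new result
assert byte_to_zwave_brightness(255) == 99
assert byte_to_zwave_brightness(1) == 1     # max(1, ...) keeps tiny values on
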
@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 108
PATCH_VERSION = "0"
PATCH_VERSION = "5"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 0)

@@ -12,7 +12,7 @@ cryptography==2.8
defusedxml==0.6.0
distro==1.4.0
hass-nabucasa==0.32.2
home-assistant-frontend==20200407.1
home-assistant-frontend==20200407.2
importlib-metadata==1.5.0
jinja2>=2.11.1
netdisco==2.6.0

@@ -35,7 +35,7 @@ Adafruit-SHT31==1.0.2
|
||||
# Adafruit_BBIO==1.1.1
|
||||
|
||||
# homeassistant.components.homekit
|
||||
HAP-python==2.8.1
|
||||
HAP-python==2.8.2
|
||||
|
||||
# homeassistant.components.mastodon
|
||||
Mastodon.py==1.5.0
|
||||
@@ -208,7 +208,7 @@ aiopvpc==1.0.2
|
||||
aiopylgtv==0.3.3
|
||||
|
||||
# homeassistant.components.switcher_kis
|
||||
aioswitcher==2019.4.26
|
||||
aioswitcher==1.1.0
|
||||
|
||||
# homeassistant.components.unifi
|
||||
aiounifi==15
|
||||
@@ -704,7 +704,7 @@ hole==0.5.1
|
||||
holidays==0.10.1
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20200407.1
|
||||
home-assistant-frontend==20200407.2
|
||||
|
||||
# homeassistant.components.zwave
|
||||
homeassistant-pyozw==0.1.10
|
||||
@@ -922,7 +922,7 @@ netdisco==2.6.0
|
||||
neurio==0.3.1
|
||||
|
||||
# homeassistant.components.nexia
|
||||
nexia==0.8.0
|
||||
nexia==0.9.1
|
||||
|
||||
# homeassistant.components.nextcloud
|
||||
nextcloudmonitor==1.1.0
|
||||
@@ -1218,7 +1218,7 @@ pycsspeechtts==1.0.3
|
||||
# pycups==1.9.73
|
||||
|
||||
# homeassistant.components.daikin
|
||||
pydaikin==1.6.2
|
||||
pydaikin==1.6.3
|
||||
|
||||
# homeassistant.components.danfoss_air
|
||||
pydanfossair==0.1.0
|
||||
@@ -1330,13 +1330,13 @@ pyialarm==0.3
|
||||
pyicloud==0.9.6.1
|
||||
|
||||
# homeassistant.components.intesishome
|
||||
pyintesishome==1.7.1
|
||||
pyintesishome==1.7.3
|
||||
|
||||
# homeassistant.components.ipma
|
||||
pyipma==2.0.5
|
||||
|
||||
# homeassistant.components.ipp
|
||||
pyipp==0.9.0
|
||||
pyipp==0.10.1
|
||||
|
||||
# homeassistant.components.iqvia
|
||||
pyiqvia==0.2.1
|
||||
@@ -1378,7 +1378,7 @@ pylitejet==0.1
|
||||
pyloopenergy==0.1.3
|
||||
|
||||
# homeassistant.components.lutron_caseta
|
||||
pylutron-caseta==0.6.0
|
||||
pylutron-caseta==0.6.1
|
||||
|
||||
# homeassistant.components.lutron
|
||||
pylutron==0.2.5
|
||||
@@ -1738,7 +1738,7 @@ pyversasense==0.0.6
|
||||
pyvesync==1.1.0
|
||||
|
||||
# homeassistant.components.vizio
|
||||
pyvizio==0.1.44
|
||||
pyvizio==0.1.46
|
||||
|
||||
# homeassistant.components.velux
|
||||
pyvlx==0.2.12
|
||||
@@ -2191,7 +2191,7 @@ zigpy-cc==0.3.1
|
||||
zigpy-deconz==0.8.0
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-homeassistant==0.18.1
|
||||
zigpy-homeassistant==0.18.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-xbee-homeassistant==0.11.0
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
-r requirements_test.txt
|
||||
|
||||
# homeassistant.components.homekit
|
||||
HAP-python==2.8.1
|
||||
HAP-python==2.8.2
|
||||
|
||||
# homeassistant.components.mobile_app
|
||||
# homeassistant.components.owntracks
|
||||
@@ -91,7 +91,7 @@ aiopvpc==1.0.2
|
||||
aiopylgtv==0.3.3
|
||||
|
||||
# homeassistant.components.switcher_kis
|
||||
aioswitcher==2019.4.26
|
||||
aioswitcher==1.1.0
|
||||
|
||||
# homeassistant.components.unifi
|
||||
aiounifi==15
|
||||
@@ -282,7 +282,7 @@ hole==0.5.1
|
||||
holidays==0.10.1
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20200407.1
|
||||
home-assistant-frontend==20200407.2
|
||||
|
||||
# homeassistant.components.zwave
|
||||
homeassistant-pyozw==0.1.10
|
||||
@@ -357,7 +357,7 @@ nessclient==0.9.15
|
||||
netdisco==2.6.0
|
||||
|
||||
# homeassistant.components.nexia
|
||||
nexia==0.8.0
|
||||
nexia==0.9.1
|
||||
|
||||
# homeassistant.components.nsw_fuel_station
|
||||
nsw-fuel-api-client==1.0.10
|
||||
@@ -479,7 +479,7 @@ pychromecast==4.2.0
|
||||
pycoolmasternet==0.0.4
|
||||
|
||||
# homeassistant.components.daikin
|
||||
pydaikin==1.6.2
|
||||
pydaikin==1.6.3
|
||||
|
||||
# homeassistant.components.deconz
|
||||
pydeconz==70
|
||||
@@ -519,7 +519,7 @@ pyicloud==0.9.6.1
|
||||
pyipma==2.0.5
|
||||
|
||||
# homeassistant.components.ipp
|
||||
pyipp==0.9.0
|
||||
pyipp==0.10.1
|
||||
|
||||
# homeassistant.components.iqvia
|
||||
pyiqvia==0.2.1
|
||||
@@ -647,7 +647,7 @@ pyvera==0.3.7
|
||||
pyvesync==1.1.0
|
||||
|
||||
# homeassistant.components.vizio
|
||||
pyvizio==0.1.44
|
||||
pyvizio==0.1.46
|
||||
|
||||
# homeassistant.components.html5
|
||||
pywebpush==1.9.2
|
||||
@@ -807,7 +807,7 @@ zigpy-cc==0.3.1
|
||||
zigpy-deconz==0.8.0
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-homeassistant==0.18.1
|
||||
zigpy-homeassistant==0.18.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-xbee-homeassistant==0.11.0
|
||||
|
||||
@@ -1,7 +1,8 @@
#!/usr/bin/execlineb -S0
#!/usr/bin/execlineb -S1
# ==============================================================================
# Take down the S6 supervision tree when Home Assistant fails
# ==============================================================================
if { s6-test ${1} -ne 100 }
if { s6-test ${1} -ne 256 }

s6-svscanctl -t /var/run/s6/services
s6-svscanctl -t /var/run/s6/services

23
rootfs/init
Executable file
@@ -0,0 +1,23 @@
#!/bin/execlineb -S0

##
## load default PATH (the same that Docker includes if not provided) if it doesn't exist,
## then go ahead with stage1.
## this was motivated due to this issue:
## - https://github.com/just-containers/s6-overlay/issues/108
##

/bin/importas -D /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin PATH PATH
export PATH ${PATH}

##
## Skip further init if the user has a given CMD.
## This is to prevent Home Assistant from starting twice if the user
## decided to override/start via the CMD.
##

ifelse { s6-test $# -ne 0 }
{
    $@
}
/etc/s6/init/init-stage1 $@
@@ -687,20 +687,30 @@ async def test_list_google_entities(hass, hass_ws_client, setup_api, mock_cloud_
|
||||
entity = GoogleEntity(
|
||||
hass, MockConfig(should_expose=lambda *_: False), State("light.kitchen", "on")
|
||||
)
|
||||
entity2 = GoogleEntity(
|
||||
hass,
|
||||
MockConfig(should_expose=lambda *_: True, should_2fa=lambda *_: False),
|
||||
State("cover.garage", "open", {"device_class": "garage"}),
|
||||
)
|
||||
with patch(
|
||||
"homeassistant.components.google_assistant.helpers.async_get_entities",
|
||||
return_value=[entity],
|
||||
return_value=[entity, entity2],
|
||||
):
|
||||
await client.send_json({"id": 5, "type": "cloud/google_assistant/entities"})
|
||||
response = await client.receive_json()
|
||||
|
||||
assert response["success"]
|
||||
assert len(response["result"]) == 1
|
||||
assert len(response["result"]) == 2
|
||||
assert response["result"][0] == {
|
||||
"entity_id": "light.kitchen",
|
||||
"might_2fa": False,
|
||||
"traits": ["action.devices.traits.OnOff"],
|
||||
}
|
||||
assert response["result"][1] == {
|
||||
"entity_id": "cover.garage",
|
||||
"might_2fa": True,
|
||||
"traits": ["action.devices.traits.OpenClose"],
|
||||
}
|
||||
|
||||
|
||||
async def test_update_google_entity(hass, hass_ws_client, setup_api, mock_cloud_login):
|
||||
|
||||
@@ -127,6 +127,73 @@ async def test_form_import(hass):
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_form_import_with_zeroconf_already_discovered(hass):
|
||||
"""Test we get the form with import source."""
|
||||
await hass.async_add_executor_job(
|
||||
init_recorder_component, hass
|
||||
) # force in memory db
|
||||
|
||||
await setup.async_setup_component(hass, "persistent_notification", {})
|
||||
|
||||
# Running the zeroconf init will make the unique id
|
||||
# in progress
|
||||
zero_conf = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_ZEROCONF},
|
||||
data={
|
||||
"properties": {"macaddress": "1CCAE3DOORBIRD"},
|
||||
"name": "Doorstation - abc123._axis-video._tcp.local.",
|
||||
"host": "192.168.1.5",
|
||||
},
|
||||
)
|
||||
assert zero_conf["type"] == data_entry_flow.RESULT_TYPE_FORM
|
||||
assert zero_conf["step_id"] == "user"
|
||||
assert zero_conf["errors"] == {}
|
||||
|
||||
import_config = VALID_CONFIG.copy()
|
||||
import_config[CONF_EVENTS] = ["event1", "event2", "event3"]
|
||||
import_config[CONF_TOKEN] = "imported_token"
|
||||
import_config[
|
||||
CONF_CUSTOM_URL
|
||||
] = "http://legacy.custom.url/should/only/come/in/from/yaml"
|
||||
|
||||
doorbirdapi = _get_mock_doorbirdapi_return_values(
|
||||
ready=[True], info={"WIFI_MAC_ADDR": "1CCAE3DOORBIRD"}
|
||||
)
|
||||
with patch(
|
||||
"homeassistant.components.doorbird.config_flow.DoorBird",
|
||||
return_value=doorbirdapi,
|
||||
), patch("homeassistant.components.logbook.async_setup", return_value=True), patch(
|
||||
"homeassistant.components.doorbird.async_setup", return_value=True
|
||||
) as mock_setup, patch(
|
||||
"homeassistant.components.doorbird.async_setup_entry", return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_IMPORT},
|
||||
data=import_config,
|
||||
)
|
||||
|
||||
assert result["type"] == "create_entry"
|
||||
assert result["title"] == "1.2.3.4"
|
||||
assert result["data"] == {
|
||||
"host": "1.2.3.4",
|
||||
"name": "mydoorbird",
|
||||
"password": "password",
|
||||
"username": "friend",
|
||||
"events": ["event1", "event2", "event3"],
|
||||
"token": "imported_token",
|
||||
# This will go away once we convert to cloud hooks
|
||||
"hass_url_override": "http://legacy.custom.url/should/only/come/in/from/yaml",
|
||||
}
|
||||
# It is not possible to import options at this time
|
||||
# so they end up in the config entry data and are
|
||||
# used as a fallback when they are not in options
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_form_zeroconf_wrong_oui(hass):
|
||||
"""Test we abort when we get the wrong OUI via zeroconf."""
|
||||
await hass.async_add_executor_job(
|
||||
|
||||
@@ -33,6 +33,7 @@ class MockConfig(helpers.AbstractConfig):
|
||||
"""Initialize config."""
|
||||
super().__init__(hass)
|
||||
self._should_expose = should_expose
|
||||
self._should_2fa = should_2fa
|
||||
self._secure_devices_pin = secure_devices_pin
|
||||
self._entity_config = entity_config or {}
|
||||
self._local_sdk_webhook_id = local_sdk_webhook_id
|
||||
@@ -73,6 +74,10 @@ class MockConfig(helpers.AbstractConfig):
|
||||
"""Expose it all."""
|
||||
return self._should_expose is None or self._should_expose(state)
|
||||
|
||||
def should_2fa(self, state):
|
||||
"""Expose it all."""
|
||||
return self._should_2fa is None or self._should_2fa(state)
|
||||
|
||||
|
||||
BASIC_CONFIG = MockConfig()
|
||||
|
||||
|
||||
@@ -845,10 +845,8 @@ async def test_lock_unlock_unlock(hass):
|
||||
assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP
|
||||
|
||||
# Test with 2FA override
|
||||
with patch(
|
||||
"homeassistant.components.google_assistant.helpers"
|
||||
".AbstractConfig.should_2fa",
|
||||
return_value=False,
|
||||
with patch.object(
|
||||
BASIC_CONFIG, "should_2fa", return_value=False,
|
||||
):
|
||||
await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {})
|
||||
assert len(calls) == 2
|
||||
|
||||
@@ -80,6 +80,8 @@ def _build_mock_url(origin, destination, modes, api_key, departure=None, arrival
|
||||
parameters["arrival"] = arrival
|
||||
if departure is not None:
|
||||
parameters["departure"] = departure
|
||||
if departure is None and arrival is None:
|
||||
parameters["departure"] = "now"
|
||||
url = base_url + urllib.parse.urlencode(parameters)
|
||||
print(url)
|
||||
return url
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Tests for the IPP config flow."""
|
||||
import aiohttp
|
||||
from pyipp import IPPConnectionUpgradeRequired
|
||||
from pyipp import IPPConnectionUpgradeRequired, IPPError
|
||||
|
||||
from homeassistant.components.ipp.const import CONF_BASE_PATH, CONF_UUID, DOMAIN
|
||||
from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
|
||||
@@ -172,6 +172,74 @@ async def test_zeroconf_parse_error(
|
||||
assert result["reason"] == "parse_error"
|
||||
|
||||
|
||||
async def test_user_ipp_error(
|
||||
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort the user flow on IPP error."""
|
||||
aioclient_mock.post("http://192.168.1.31:631/ipp/print", exc=IPPError)
|
||||
|
||||
user_input = MOCK_USER_INPUT.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}, data=user_input,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "ipp_error"
|
||||
|
||||
|
||||
async def test_zeroconf_ipp_error(
|
||||
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort zeroconf flow on IPP error."""
|
||||
aioclient_mock.post("http://192.168.1.31:631/ipp/print", exc=IPPError)
|
||||
|
||||
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "ipp_error"
|
||||
|
||||
|
||||
async def test_user_ipp_version_error(
|
||||
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort user flow on IPP version not supported error."""
|
||||
aioclient_mock.post(
|
||||
"http://192.168.1.31:631/ipp/print",
|
||||
content=load_fixture_binary("ipp/get-printer-attributes-error-0x0503.bin"),
|
||||
headers={"Content-Type": "application/ipp"},
|
||||
)
|
||||
|
||||
user_input = {**MOCK_USER_INPUT}
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}, data=user_input,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "ipp_version_error"
|
||||
|
||||
|
||||
async def test_zeroconf_ipp_version_error(
|
||||
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort zeroconf flow on IPP version not supported error."""
|
||||
aioclient_mock.post(
|
||||
"http://192.168.1.31:631/ipp/print",
|
||||
content=load_fixture_binary("ipp/get-printer-attributes-error-0x0503.bin"),
|
||||
headers={"Content-Type": "application/ipp"},
|
||||
)
|
||||
|
||||
discovery_info = {**MOCK_ZEROCONF_IPP_SERVICE_INFO}
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "ipp_version_error"
|
||||
|
||||
|
||||
async def test_user_device_exists_abort(
|
||||
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
|
||||
@@ -362,10 +362,11 @@ async def test_ssdp_host_update(hass, mock_panel):
|
||||
)
|
||||
assert result["type"] == "abort"
|
||||
|
||||
# confirm the host value was updated
|
||||
# confirm the host value was updated, access_token was not
|
||||
entry = hass.config_entries.async_entries(config_flow.DOMAIN)[0]
|
||||
assert entry.data["host"] == "1.1.1.1"
|
||||
assert entry.data["port"] == 1234
|
||||
assert entry.data["access_token"] == "11223344556677889900"
|
||||
|
||||
|
||||
async def test_import_existing_config(hass, mock_panel):
|
||||
@@ -494,6 +495,7 @@ async def test_import_existing_config_entry(hass, mock_panel):
|
||||
data={
|
||||
"host": "0.0.0.0",
|
||||
"port": 1111,
|
||||
"access_token": "ORIGINALTOKEN",
|
||||
"id": "112233445566",
|
||||
"extra": "something",
|
||||
},
|
||||
@@ -546,14 +548,14 @@ async def test_import_existing_config_entry(hass, mock_panel):
|
||||
|
||||
assert result["type"] == "abort"
|
||||
|
||||
# We should have updated the entry
|
||||
# We should have updated the host info but not the access token
|
||||
assert len(hass.config_entries.async_entries("konnected")) == 1
|
||||
assert hass.config_entries.async_entries("konnected")[0].data == {
|
||||
"host": "1.2.3.4",
|
||||
"port": 1234,
|
||||
"access_token": "ORIGINALTOKEN",
|
||||
"id": "112233445566",
|
||||
"model": "Konnected Pro",
|
||||
"access_token": "SUPERSECRETTOKEN",
|
||||
"extra": "something",
|
||||
}
|
||||
|
||||
|
||||
90
tests/components/modbus/conftest.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""The tests for the Modbus sensor component."""
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.modbus.const import (
|
||||
CALL_TYPE_REGISTER_INPUT,
|
||||
CONF_REGISTER,
|
||||
CONF_REGISTER_TYPE,
|
||||
CONF_REGISTERS,
|
||||
DEFAULT_HUB,
|
||||
MODBUS_DOMAIN as DOMAIN,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME, CONF_PLATFORM, CONF_SCAN_INTERVAL
|
||||
from homeassistant.setup import async_setup_component
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from tests.common import MockModule, async_fire_time_changed, mock_integration
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_hub(hass):
|
||||
"""Mock hub."""
|
||||
mock_integration(hass, MockModule(DOMAIN))
|
||||
hub = mock.MagicMock()
|
||||
hub.name = "hub"
|
||||
hass.data[DOMAIN] = {DEFAULT_HUB: hub}
|
||||
return hub
|
||||
|
||||
|
||||
class ReadResult:
|
||||
"""Storage class for register read results."""
|
||||
|
||||
def __init__(self, register_words):
|
||||
"""Init."""
|
||||
self.registers = register_words
|
||||
|
||||
|
||||
read_result = None
|
||||
|
||||
|
||||
async def run_test(
|
||||
hass, use_mock_hub, register_config, entity_domain, register_words, expected
|
||||
):
|
||||
"""Run test for given config and check that sensor outputs expected result."""
|
||||
|
||||
async def simulate_read_registers(unit, address, count):
|
||||
"""Simulate modbus register read."""
|
||||
del unit, address, count # not used in simulation, but in real connection
|
||||
return read_result
|
||||
|
||||
# Full sensor configuration
|
||||
sensor_name = "modbus_test_sensor"
|
||||
scan_interval = 5
|
||||
config = {
|
||||
entity_domain: {
|
||||
CONF_PLATFORM: "modbus",
|
||||
CONF_SCAN_INTERVAL: scan_interval,
|
||||
CONF_REGISTERS: [
|
||||
dict(**{CONF_NAME: sensor_name, CONF_REGISTER: 1234}, **register_config)
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
# Setup inputs for the sensor
|
||||
read_result = ReadResult(register_words)
|
||||
if register_config.get(CONF_REGISTER_TYPE) == CALL_TYPE_REGISTER_INPUT:
|
||||
use_mock_hub.read_input_registers = simulate_read_registers
|
||||
else:
|
||||
use_mock_hub.read_holding_registers = simulate_read_registers
|
||||
|
||||
# Initialize sensor
|
||||
now = dt_util.utcnow()
|
||||
with mock.patch("homeassistant.helpers.event.dt_util.utcnow", return_value=now):
|
||||
assert await async_setup_component(hass, entity_domain, config)
|
||||
|
||||
# Trigger update call with time_changed event
|
||||
now += timedelta(seconds=scan_interval + 1)
|
||||
with mock.patch("homeassistant.helpers.event.dt_util.utcnow", return_value=now):
|
||||
async_fire_time_changed(hass, now)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Check state
|
||||
entity_id = f"{entity_domain}.{sensor_name}"
|
||||
state = hass.states.get(entity_id).state
|
||||
assert state == expected
|
||||
@@ -1,8 +1,5 @@
|
||||
"""The tests for the Modbus sensor component."""
|
||||
from datetime import timedelta
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
import logging
|
||||
|
||||
from homeassistant.components.modbus.const import (
|
||||
CALL_TYPE_REGISTER_HOLDING,
|
||||
@@ -11,78 +8,18 @@ from homeassistant.components.modbus.const import (
|
||||
CONF_DATA_TYPE,
|
||||
CONF_OFFSET,
|
||||
CONF_PRECISION,
|
||||
CONF_REGISTER,
|
||||
CONF_REGISTER_TYPE,
|
||||
CONF_REGISTERS,
|
||||
CONF_REVERSE_ORDER,
|
||||
CONF_SCALE,
|
||||
DATA_TYPE_FLOAT,
|
||||
DATA_TYPE_INT,
|
||||
DATA_TYPE_UINT,
|
||||
DEFAULT_HUB,
|
||||
MODBUS_DOMAIN,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME, CONF_PLATFORM, CONF_SCAN_INTERVAL
|
||||
from homeassistant.setup import async_setup_component
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
|
||||
from tests.common import MockModule, async_fire_time_changed, mock_integration
|
||||
from .conftest import run_test
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_hub(hass):
|
||||
"""Mock hub."""
|
||||
mock_integration(hass, MockModule(MODBUS_DOMAIN))
|
||||
hub = mock.MagicMock()
|
||||
hub.name = "hub"
|
||||
hass.data[MODBUS_DOMAIN] = {DEFAULT_HUB: hub}
|
||||
return hub
|
||||
|
||||
|
||||
common_register_config = {CONF_NAME: "test-config", CONF_REGISTER: 1234}
|
||||
|
||||
|
||||
class ReadResult:
|
||||
"""Storage class for register read results."""
|
||||
|
||||
def __init__(self, register_words):
|
||||
"""Init."""
|
||||
self.registers = register_words
|
||||
|
||||
|
||||
async def run_test(hass, mock_hub, register_config, register_words, expected):
|
||||
"""Run test for given config and check that sensor outputs expected result."""
|
||||
|
||||
# Full sensor configuration
|
||||
sensor_name = "modbus_test_sensor"
|
||||
scan_interval = 5
|
||||
config = {
|
||||
MODBUS_DOMAIN: {
|
||||
CONF_PLATFORM: "modbus",
|
||||
CONF_SCAN_INTERVAL: scan_interval,
|
||||
CONF_REGISTERS: [
|
||||
dict(**{CONF_NAME: sensor_name, CONF_REGISTER: 1234}, **register_config)
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
# Setup inputs for the sensor
|
||||
read_result = ReadResult(register_words)
|
||||
if register_config.get(CONF_REGISTER_TYPE) == CALL_TYPE_REGISTER_INPUT:
|
||||
mock_hub.read_input_registers.return_value = read_result
|
||||
else:
|
||||
mock_hub.read_holding_registers.return_value = read_result
|
||||
|
||||
# Initialize sensor
|
||||
now = dt_util.utcnow()
|
||||
with mock.patch("homeassistant.helpers.event.dt_util.utcnow", return_value=now):
|
||||
assert await async_setup_component(hass, MODBUS_DOMAIN, config)
|
||||
|
||||
# Trigger update call with time_changed event
|
||||
now += timedelta(seconds=scan_interval + 1)
|
||||
with mock.patch("homeassistant.helpers.event.dt_util.utcnow", return_value=now):
|
||||
async_fire_time_changed(hass, now)
|
||||
await hass.async_block_till_done()
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def test_simple_word_register(hass, mock_hub):
|
||||
@@ -94,14 +31,26 @@ async def test_simple_word_register(hass, mock_hub):
|
||||
CONF_OFFSET: 0,
|
||||
CONF_PRECISION: 0,
|
||||
}
|
||||
await run_test(hass, mock_hub, register_config, register_words=[0], expected="0")
|
||||
await run_test(
|
||||
hass,
|
||||
mock_hub,
|
||||
register_config,
|
||||
SENSOR_DOMAIN,
|
||||
register_words=[0],
|
||||
expected="0",
|
||||
)
|
||||
|
||||
|
||||
async def test_optional_conf_keys(hass, mock_hub):
|
||||
"""Test handling of optional configuration keys."""
|
||||
register_config = {}
|
||||
await run_test(
|
||||
hass, mock_hub, register_config, register_words=[0x8000], expected="-32768"
|
||||
hass,
|
||||
mock_hub,
|
||||
register_config,
|
||||
SENSOR_DOMAIN,
|
||||
register_words=[0x8000],
|
||||
expected="-32768",
|
||||
)
|
||||
|
||||
|
||||
@@ -114,7 +63,14 @@ async def test_offset(hass, mock_hub):
|
||||
CONF_OFFSET: 13,
|
||||
CONF_PRECISION: 0,
|
||||
}
|
||||
await run_test(hass, mock_hub, register_config, register_words=[7], expected="20")
|
||||
await run_test(
|
||||
hass,
|
||||
mock_hub,
|
||||
register_config,
|
||||
SENSOR_DOMAIN,
|
||||
register_words=[7],
|
||||
expected="20",
|
||||
)
|
||||
|
||||
|
||||
async def test_scale_and_offset(hass, mock_hub):
|
||||
@@ -126,7 +82,14 @@ async def test_scale_and_offset(hass, mock_hub):
|
||||
CONF_OFFSET: 13,
|
||||
CONF_PRECISION: 0,
|
||||
}
|
||||
await run_test(hass, mock_hub, register_config, register_words=[7], expected="34")
|
||||
await run_test(
|
||||
hass,
|
||||
mock_hub,
|
||||
register_config,
|
||||
SENSOR_DOMAIN,
|
||||
register_words=[7],
|
||||
expected="34",
|
||||
)
|
||||
|
||||
|
||||
async def test_ints_can_have_precision(hass, mock_hub):
|
||||
@@ -139,7 +102,12 @@ async def test_ints_can_have_precision(hass, mock_hub):
|
||||
CONF_PRECISION: 4,
|
||||
}
|
||||
await run_test(
|
||||
hass, mock_hub, register_config, register_words=[7], expected="34.0000"
|
||||
hass,
|
||||
mock_hub,
|
||||
register_config,
|
||||
SENSOR_DOMAIN,
|
||||
register_words=[7],
|
||||
expected="34.0000",
|
||||
)
|
||||
|
||||
|
||||
@@ -152,7 +120,14 @@ async def test_floats_get_rounded_correctly(hass, mock_hub):
|
||||
CONF_OFFSET: 0,
|
||||
CONF_PRECISION: 0,
|
||||
}
|
||||
await run_test(hass, mock_hub, register_config, register_words=[1], expected="2")
|
||||
await run_test(
|
||||
hass,
|
||||
mock_hub,
|
||||
register_config,
|
||||
SENSOR_DOMAIN,
|
||||
register_words=[1],
|
||||
expected="2",
|
||||
)
|
||||
|
||||
|
||||
async def test_parameters_as_strings(hass, mock_hub):
|
||||
@@ -164,7 +139,14 @@ async def test_parameters_as_strings(hass, mock_hub):
|
||||
CONF_OFFSET: "5",
|
||||
CONF_PRECISION: "1",
|
||||
}
|
||||
await run_test(hass, mock_hub, register_config, register_words=[9], expected="18.5")
|
||||
await run_test(
|
||||
hass,
|
||||
mock_hub,
|
||||
register_config,
|
||||
SENSOR_DOMAIN,
|
||||
register_words=[9],
|
||||
expected="18.5",
|
||||
)
|
||||
|
||||
|
||||
async def test_floating_point_scale(hass, mock_hub):
|
||||
@@ -176,7 +158,14 @@ async def test_floating_point_scale(hass, mock_hub):
|
||||
CONF_OFFSET: 0,
|
||||
CONF_PRECISION: 2,
|
||||
}
|
||||
await run_test(hass, mock_hub, register_config, register_words=[1], expected="2.40")
|
||||
await run_test(
|
||||
hass,
|
||||
mock_hub,
|
||||
register_config,
|
||||
SENSOR_DOMAIN,
|
register_words=[1],
expected="2.40",
)


async def test_floating_point_offset(hass, mock_hub):
@@ -188,7 +177,14 @@ async def test_floating_point_offset(hass, mock_hub):
CONF_OFFSET: -10.3,
CONF_PRECISION: 1,
}
await run_test(hass, mock_hub, register_config, register_words=[2], expected="-8.3")
await run_test(
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[2],
expected="-8.3",
)


async def test_signed_two_word_register(hass, mock_hub):
@@ -204,6 +200,7 @@ async def test_signed_two_word_register(hass, mock_hub):
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[0x89AB, 0xCDEF],
expected="-1985229329",
)
@@ -222,6 +219,7 @@ async def test_unsigned_two_word_register(hass, mock_hub):
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[0x89AB, 0xCDEF],
expected=str(0x89ABCDEF),
)
@@ -238,6 +236,7 @@ async def test_reversed(hass, mock_hub):
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[0x89AB, 0xCDEF],
expected=str(0xCDEF89AB),
)
@@ -256,6 +255,7 @@ async def test_four_word_register(hass, mock_hub):
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[0x89AB, 0xCDEF, 0x0123, 0x4567],
expected="9920249030613615975",
)
@@ -274,6 +274,7 @@ async def test_four_word_register_precision_is_intact_with_int_params(hass, mock
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[0x0123, 0x4567, 0x89AB, 0xCDEF],
expected="163971058432973793",
)
@@ -292,6 +293,7 @@ async def test_four_word_register_precision_is_lost_with_float_params(hass, mock
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[0x0123, 0x4567, 0x89AB, 0xCDEF],
expected="163971058432973792",
)
@@ -311,6 +313,7 @@ async def test_two_word_input_register(hass, mock_hub):
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[0x89AB, 0xCDEF],
expected=str(0x89ABCDEF),
)
@@ -330,6 +333,7 @@ async def test_two_word_holding_register(hass, mock_hub):
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[0x89AB, 0xCDEF],
expected=str(0x89ABCDEF),
)
@@ -349,6 +353,7 @@ async def test_float_data_type(hass, mock_hub):
hass,
mock_hub,
register_config,
SENSOR_DOMAIN,
register_words=[16286, 1617],
expected="1.23457",
)

@@ -294,6 +294,58 @@ async def test_update(hass):
assert "three" == state.attributes[ATTR_INPUT_SOURCE]


async def test_failed_update(hass):
"""Test updating failure from monoprice."""
monoprice = MockMonoprice()
await _setup_monoprice(hass, monoprice)

# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 0.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "one"}
)

monoprice.set_source(11, 3)
monoprice.set_volume(11, 38)

with patch.object(MockMonoprice, "zone_status", side_effect=SerialException):
await async_update_entity(hass, ZONE_1_ID)
await hass.async_block_till_done()

state = hass.states.get(ZONE_1_ID)

assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.0
assert state.attributes[ATTR_INPUT_SOURCE] == "one"


async def test_empty_update(hass):
"""Test updating with no state from monoprice."""
monoprice = MockMonoprice()
await _setup_monoprice(hass, monoprice)

# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 0.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "one"}
)

monoprice.set_source(11, 3)
monoprice.set_volume(11, 38)

with patch.object(MockMonoprice, "zone_status", return_value=None):
await async_update_entity(hass, ZONE_1_ID)
await hass.async_block_till_done()

state = hass.states.get(ZONE_1_ID)

assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.0
assert state.attributes[ATTR_INPUT_SOURCE] == "one"


async def test_supported_features(hass):
"""Test supported features property."""
await _setup_monoprice(hass, MockMonoprice())

@@ -18,8 +18,8 @@ async def test_climate_zones(hass):
"current_temperature": 22.8,
"dehumidify_setpoint": 45.0,
"dehumidify_supported": True,
"fan_mode": "auto",
"fan_modes": ["auto", "on", "circulate"],
"fan_mode": "Auto",
"fan_modes": ["Auto", "On", "Circulate"],
"friendly_name": "Nick Office",
"humidify_supported": False,
"humidity": 45.0,
@@ -53,8 +53,8 @@ async def test_climate_zones(hass):
"current_temperature": 25.0,
"dehumidify_setpoint": 50.0,
"dehumidify_supported": True,
"fan_mode": "auto",
"fan_modes": ["auto", "on", "circulate"],
"fan_mode": "Auto",
"fan_modes": ["Auto", "On", "Circulate"],
"friendly_name": "Kitchen",
"humidify_supported": False,
"humidity": 50.0,

@@ -1,20 +0,0 @@
"""Common fixtures and functions for Plex tests."""
from datetime import timedelta

from homeassistant.components.plex.const import (
DEBOUNCE_TIMEOUT,
PLEX_UPDATE_PLATFORMS_SIGNAL,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
import homeassistant.util.dt as dt_util

from tests.common import async_fire_time_changed


async def trigger_plex_update(hass, server_id):
"""Update Plex by sending signal and jumping ahead by debounce timeout."""
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()
next_update = dt_util.utcnow() + timedelta(seconds=DEBOUNCE_TIMEOUT)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
@@ -15,13 +15,14 @@ from homeassistant.components.plex.const import (
CONF_USE_EPISODE_ART,
DOMAIN,
PLEX_SERVER_CONFIG,
PLEX_UPDATE_PLATFORMS_SIGNAL,
SERVERS,
)
from homeassistant.config_entries import ENTRY_STATE_LOADED
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN, CONF_URL
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.setup import async_setup_component

from .common import trigger_plex_update
from .const import DEFAULT_DATA, DEFAULT_OPTIONS, MOCK_SERVERS, MOCK_TOKEN
from .mock_classes import MockPlexAccount, MockPlexServer

@@ -415,7 +416,8 @@ async def test_option_flow_new_users_available(hass, caplog):

server_id = mock_plex_server.machineIdentifier

await trigger_plex_update(hass, server_id)
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()

monitored_users = hass.data[DOMAIN][SERVERS][server_id].option_monitored_users


@@ -3,8 +3,9 @@ import copy
from datetime import timedelta
import ssl

from asynctest import patch
from asynctest import ClockedTestCase, patch
import plexapi
import pytest
import requests

from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
@@ -23,14 +24,19 @@ from homeassistant.const import (
CONF_URL,
CONF_VERIFY_SSL,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util

from .common import trigger_plex_update
from .const import DEFAULT_DATA, DEFAULT_OPTIONS, MOCK_SERVERS, MOCK_TOKEN
from .mock_classes import MockPlexAccount, MockPlexServer

from tests.common import MockConfigEntry, async_fire_time_changed
from tests.common import (
MockConfigEntry,
async_fire_time_changed,
async_test_home_assistant,
mock_storage,
)


async def test_setup_with_config(hass):
@@ -67,70 +73,90 @@ async def test_setup_with_config(hass):

assert loaded_server.plex_server == mock_plex_server

assert server_id in hass.data[const.DOMAIN][const.DISPATCHERS]
assert server_id in hass.data[const.DOMAIN][const.WEBSOCKETS]
assert (
hass.data[const.DOMAIN][const.PLATFORMS_COMPLETED][server_id] == const.PLATFORMS
)

class TestClockedPlex(ClockedTestCase):
"""Create clock-controlled asynctest class."""

async def test_setup_with_config_entry(hass, caplog):
"""Test setup component with config."""
@pytest.fixture(autouse=True)
def inject_fixture(self, caplog):
"""Inject pytest fixtures as instance attributes."""
self.caplog = caplog

mock_plex_server = MockPlexServer()
async def setUp(self):
"""Initialize this test class."""
self.hass = await async_test_home_assistant(self.loop)
self.mock_storage = mock_storage()
self.mock_storage.__enter__()

entry = MockConfigEntry(
domain=const.DOMAIN,
data=DEFAULT_DATA,
options=DEFAULT_OPTIONS,
unique_id=DEFAULT_DATA["server_id"],
)
async def tearDown(self):
"""Clean up the HomeAssistant instance."""
await self.hass.async_stop()
self.mock_storage.__exit__(None, None, None)

with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch(
"homeassistant.components.plex.PlexWebsocket.listen"
) as mock_listen:
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
async def test_setup_with_config_entry(self):
"""Test setup component with config."""
hass = self.hass

mock_plex_server = MockPlexServer()

entry = MockConfigEntry(
domain=const.DOMAIN,
data=DEFAULT_DATA,
options=DEFAULT_OPTIONS,
unique_id=DEFAULT_DATA["server_id"],
)

with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch(
"homeassistant.components.plex.PlexWebsocket.listen"
) as mock_listen:
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

assert mock_listen.called

assert len(hass.config_entries.async_entries(const.DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED

server_id = mock_plex_server.machineIdentifier
loaded_server = hass.data[const.DOMAIN][const.SERVERS][server_id]

assert loaded_server.plex_server == mock_plex_server

async_dispatcher_send(
hass, const.PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id)
)
await hass.async_block_till_done()

assert mock_listen.called
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))

assert len(hass.config_entries.async_entries(const.DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED

server_id = mock_plex_server.machineIdentifier
loaded_server = hass.data[const.DOMAIN][const.SERVERS][server_id]

assert loaded_server.plex_server == mock_plex_server

assert server_id in hass.data[const.DOMAIN][const.DISPATCHERS]
assert server_id in hass.data[const.DOMAIN][const.WEBSOCKETS]
assert (
hass.data[const.DOMAIN][const.PLATFORMS_COMPLETED][server_id] == const.PLATFORMS
)

await trigger_plex_update(hass, server_id)

sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))

await trigger_plex_update(hass, server_id)

for test_exception in (
plexapi.exceptions.BadRequest,
requests.exceptions.RequestException,
):
with patch.object(
mock_plex_server, "clients", side_effect=test_exception
) as patched_clients_bad_request:
await trigger_plex_update(hass, server_id)

assert patched_clients_bad_request.called
assert (
f"Could not connect to Plex server: {mock_plex_server.friendlyName}"
in caplog.text
# Ensure existing entities refresh
await self.advance(const.DEBOUNCE_TIMEOUT)
async_dispatcher_send(
hass, const.PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id)
)
caplog.clear()
await hass.async_block_till_done()

for test_exception in (
plexapi.exceptions.BadRequest,
requests.exceptions.RequestException,
):
with patch.object(
mock_plex_server, "clients", side_effect=test_exception
) as patched_clients_bad_request:
await self.advance(const.DEBOUNCE_TIMEOUT)
async_dispatcher_send(
hass, const.PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id)
)
await hass.async_block_till_done()

assert patched_clients_bad_request.called
assert (
f"Could not connect to Plex server: {mock_plex_server.friendlyName}"
in self.caplog.text
)
self.caplog.clear()


async def test_set_config_entry_unique_id(hass):
@@ -251,22 +277,12 @@ async def test_unload_config_entry(hass):

assert loaded_server.plex_server == mock_plex_server

assert server_id in hass.data[const.DOMAIN][const.DISPATCHERS]
assert server_id in hass.data[const.DOMAIN][const.WEBSOCKETS]
assert (
hass.data[const.DOMAIN][const.PLATFORMS_COMPLETED][server_id] == const.PLATFORMS
)

with patch("homeassistant.components.plex.PlexWebsocket.close") as mock_close:
await hass.config_entries.async_unload(entry.entry_id)
assert mock_close.called

assert entry.state == ENTRY_STATE_NOT_LOADED

assert server_id not in hass.data[const.DOMAIN][const.SERVERS]
assert server_id not in hass.data[const.DOMAIN][const.DISPATCHERS]
assert server_id not in hass.data[const.DOMAIN][const.WEBSOCKETS]


async def test_setup_with_photo_session(hass):
"""Test setup component with config."""
@@ -292,7 +308,8 @@ async def test_setup_with_photo_session(hass):

server_id = mock_plex_server.machineIdentifier

await trigger_plex_update(hass, server_id)
async_dispatcher_send(hass, const.PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()

media_player = hass.states.get("media_player.plex_product_title")
assert media_player.state == "idle"

@@ -1,8 +1,7 @@
"""Tests for Plex server."""
import copy
from datetime import timedelta

from asynctest import patch
from asynctest import ClockedTestCase, patch

from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.components.plex.const import (
@@ -14,13 +13,11 @@ from homeassistant.components.plex.const import (
SERVERS,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
import homeassistant.util.dt as dt_util

from .common import trigger_plex_update
from .const import DEFAULT_DATA, DEFAULT_OPTIONS
from .mock_classes import MockPlexServer

from tests.common import MockConfigEntry, async_fire_time_changed
from tests.common import MockConfigEntry, async_test_home_assistant, mock_storage


async def test_new_users_available(hass):
@@ -48,7 +45,8 @@ async def test_new_users_available(hass):

server_id = mock_plex_server.machineIdentifier

await trigger_plex_update(hass, server_id)
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()

monitored_users = hass.data[DOMAIN][SERVERS][server_id].option_monitored_users

@@ -86,7 +84,8 @@ async def test_new_ignored_users_available(hass, caplog):

server_id = mock_plex_server.machineIdentifier

await trigger_plex_update(hass, server_id)
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()

monitored_users = hass.data[DOMAIN][SERVERS][server_id].option_monitored_users

@@ -100,72 +99,109 @@ async def test_new_ignored_users_available(hass, caplog):
assert sensor.state == str(len(mock_plex_server.accounts))


async def test_mark_sessions_idle(hass):
"""Test marking media_players as idle when sessions end."""
entry = MockConfigEntry(
domain=DOMAIN,
data=DEFAULT_DATA,
options=DEFAULT_OPTIONS,
unique_id=DEFAULT_DATA["server_id"],
)
class TestClockedPlex(ClockedTestCase):
"""Create clock-controlled asynctest class."""

mock_plex_server = MockPlexServer(config_entry=entry)
async def setUp(self):
"""Initialize this test class."""
self.hass = await async_test_home_assistant(self.loop)
self.mock_storage = mock_storage()
self.mock_storage.__enter__()

with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch(
"homeassistant.components.plex.PlexWebsocket.listen"
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
async def tearDown(self):
"""Clean up the HomeAssistant instance."""
await self.hass.async_stop()
self.mock_storage.__exit__(None, None, None)

async def test_mark_sessions_idle(self):
"""Test marking media_players as idle when sessions end."""
hass = self.hass

entry = MockConfigEntry(
domain=DOMAIN,
data=DEFAULT_DATA,
options=DEFAULT_OPTIONS,
unique_id=DEFAULT_DATA["server_id"],
)

mock_plex_server = MockPlexServer(config_entry=entry)

with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch(
"homeassistant.components.plex.PlexWebsocket.listen"
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

server_id = mock_plex_server.machineIdentifier

async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()

server_id = mock_plex_server.machineIdentifier
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))

await trigger_plex_update(hass, server_id)
mock_plex_server.clear_clients()
mock_plex_server.clear_sessions()

sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))

mock_plex_server.clear_clients()
mock_plex_server.clear_sessions()

await trigger_plex_update(hass, server_id)

sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == "0"


async def test_debouncer(hass, caplog):
"""Test debouncer decorator logic."""
entry = MockConfigEntry(
domain=DOMAIN,
data=DEFAULT_DATA,
options=DEFAULT_OPTIONS,
unique_id=DEFAULT_DATA["server_id"],
)

mock_plex_server = MockPlexServer(config_entry=entry)

with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch(
"homeassistant.components.plex.PlexWebsocket.listen"
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await self.advance(DEBOUNCE_TIMEOUT)
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()

server_id = mock_plex_server.machineIdentifier
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == "0"

# First two updates are skipped
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()
async def test_debouncer(self):
"""Test debouncer behavior."""
hass = self.hass

next_update = dt_util.utcnow() + timedelta(seconds=DEBOUNCE_TIMEOUT)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
entry = MockConfigEntry(
domain=DOMAIN,
data=DEFAULT_DATA,
options=DEFAULT_OPTIONS,
unique_id=DEFAULT_DATA["server_id"],
)

assert (
caplog.text.count(f"Throttling update of {mock_plex_server.friendlyName}") == 2
)
mock_plex_server = MockPlexServer(config_entry=entry)

with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch(
"homeassistant.components.plex.PlexWebsocket.listen"
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

server_id = mock_plex_server.machineIdentifier

with patch.object(mock_plex_server, "clients", return_value=[]), patch.object(
mock_plex_server, "sessions", return_value=[]
) as mock_update:
# Called immediately
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()
assert mock_update.call_count == 1

# Throttled
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()
assert mock_update.call_count == 1

# Throttled
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()
assert mock_update.call_count == 1

# Called from scheduler
await self.advance(DEBOUNCE_TIMEOUT)
await hass.async_block_till_done()
assert mock_update.call_count == 2

# Throttled
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
await hass.async_block_till_done()
assert mock_update.call_count == 2

# Called from scheduler
await self.advance(DEBOUNCE_TIMEOUT)
await hass.async_block_till_done()
assert mock_update.call_count == 3

@@ -39,13 +39,14 @@ async def test_sensors(hass):
"energy_exported": 10429451.9916853,
"energy_imported": 4824191.60668611,
"instant_average_voltage": 120.650001525879,
"unit_of_measurement": "kWh",
"unit_of_measurement": "kW",
"friendly_name": "Powerwall Site Now",
"device_class": "power",
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
for key, value in expected_attributes.items():
assert state.attributes[key] == value

state = hass.states.get("sensor.powerwall_load_now")
assert state.state == "1.971"
@@ -54,13 +55,14 @@ async def test_sensors(hass):
"energy_exported": 1056797.48917483,
"energy_imported": 4692987.91889705,
"instant_average_voltage": 120.650001525879,
"unit_of_measurement": "kWh",
"unit_of_measurement": "kW",
"friendly_name": "Powerwall Load Now",
"device_class": "power",
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
for key, value in expected_attributes.items():
assert state.attributes[key] == value

state = hass.states.get("sensor.powerwall_battery_now")
assert state.state == "-8.55"
@@ -69,13 +71,14 @@ async def test_sensors(hass):
"energy_exported": 3620010,
"energy_imported": 4216170,
"instant_average_voltage": 240.56,
"unit_of_measurement": "kWh",
"unit_of_measurement": "kW",
"friendly_name": "Powerwall Battery Now",
"device_class": "power",
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
for key, value in expected_attributes.items():
assert state.attributes[key] == value

state = hass.states.get("sensor.powerwall_solar_now")
assert state.state == "10.49"
@@ -84,13 +87,14 @@ async def test_sensors(hass):
"energy_exported": 9864205.82222448,
"energy_imported": 28177.5358355867,
"instant_average_voltage": 120.685001373291,
"unit_of_measurement": "kWh",
"unit_of_measurement": "kW",
"friendly_name": "Powerwall Solar Now",
"device_class": "power",
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
for key, value in expected_attributes.items():
assert state.attributes[key] == value

state = hass.states.get("sensor.powerwall_charge")
assert state.state == "47.32"
@@ -101,4 +105,5 @@ async def test_sensors(hass):
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
for key, value in expected_attributes.items():
assert state.attributes[key] == value

@@ -1,6 +1,6 @@
"""Tests for light platform."""
from typing import Callable, NamedTuple
from unittest.mock import Mock, patch
from unittest.mock import Mock, PropertyMock, patch

from pyHS100 import SmartDeviceException
import pytest
@@ -16,7 +16,11 @@ from homeassistant.components.light import (
ATTR_HS_COLOR,
DOMAIN as LIGHT_DOMAIN,
)
from homeassistant.components.tplink.common import CONF_DISCOVERY, CONF_LIGHT
from homeassistant.components.tplink.common import (
CONF_DIMMER,
CONF_DISCOVERY,
CONF_LIGHT,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
@@ -41,6 +45,15 @@ class LightMockData(NamedTuple):
get_emeter_monthly_mock: Mock


class SmartSwitchMockData(NamedTuple):
"""Mock smart switch data."""

sys_info: dict
state_mock: Mock
brightness_mock: Mock
get_sysinfo_mock: Mock


@pytest.fixture(name="light_mock_data")
def light_mock_data_fixture() -> None:
"""Create light mock data."""
@@ -85,6 +98,7 @@ def light_mock_data_fixture() -> None:

light_state.update(state)
light_state["dft_on_state"] = drt_on_state
return light_state

set_light_state_patch = patch(
"homeassistant.components.tplink.common.SmartBulb.set_light_state",
@@ -151,6 +165,74 @@ def light_mock_data_fixture() -> None:
)


@pytest.fixture(name="dimmer_switch_mock_data")
def dimmer_switch_mock_data_fixture() -> None:
"""Create dimmer switch mock data."""
sys_info = {
"sw_ver": "1.2.3",
"hw_ver": "2.3.4",
"mac": "aa:bb:cc:dd:ee:ff",
"mic_mac": "00:11:22:33:44",
"type": "switch",
"hwId": "1234",
"fwId": "4567",
"oemId": "891011",
"dev_name": "dimmer1",
"rssi": 11,
"latitude": "0",
"longitude": "0",
"is_color": False,
"is_dimmable": True,
"is_variable_color_temp": False,
"model": "HS220",
"alias": "dimmer1",
"feature": ":",
"relay_state": 1,
"brightness": 13,
}

def state(*args, **kwargs):
nonlocal sys_info
if len(args) == 0:
return sys_info["relay_state"]
if args[0] == "ON":
sys_info["relay_state"] = 1
else:
sys_info["relay_state"] = 0

def brightness(*args, **kwargs):
nonlocal sys_info
if len(args) == 0:
return sys_info["brightness"]
if sys_info["brightness"] == 0:
sys_info["relay_state"] = 0
else:
sys_info["relay_state"] = 1
sys_info["brightness"] = args[0]

get_sysinfo_patch = patch(
"homeassistant.components.tplink.common.SmartDevice.get_sysinfo",
return_value=sys_info,
)
state_patch = patch(
"homeassistant.components.tplink.common.SmartPlug.state",
new_callable=PropertyMock,
side_effect=state,
)
brightness_patch = patch(
"homeassistant.components.tplink.common.SmartPlug.brightness",
new_callable=PropertyMock,
side_effect=brightness,
)
with brightness_patch as brightness_mock, state_patch as state_mock, get_sysinfo_patch as get_sysinfo_mock:
yield SmartSwitchMockData(
sys_info=sys_info,
brightness_mock=brightness_mock,
state_mock=state_mock,
get_sysinfo_mock=get_sysinfo_mock,
)


async def update_entity(hass: HomeAssistant, entity_id: str) -> None:
"""Run an update action for an entity."""
await hass.services.async_call(
@@ -159,6 +241,96 @@ async def update_entity(hass: HomeAssistant, entity_id: str) -> None:
await hass.async_block_till_done()


async def test_smartswitch(
hass: HomeAssistant, dimmer_switch_mock_data: SmartSwitchMockData
) -> None:
"""Test function."""
sys_info = dimmer_switch_mock_data.sys_info

await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()

await async_setup_component(
hass,
tplink.DOMAIN,
{
tplink.DOMAIN: {
CONF_DISCOVERY: False,
CONF_DIMMER: [{CONF_HOST: "123.123.123.123"}],
}
},
)
await hass.async_block_till_done()

assert hass.states.get("light.dimmer1")

await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.dimmer1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")

assert hass.states.get("light.dimmer1").state == "off"
assert sys_info["relay_state"] == 0

await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.dimmer1", ATTR_BRIGHTNESS: 50},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")

state = hass.states.get("light.dimmer1")
assert state.state == "on"
assert state.attributes["brightness"] == 48.45
assert sys_info["relay_state"] == 1

await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.dimmer1", ATTR_BRIGHTNESS: 55},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")

state = hass.states.get("light.dimmer1")
assert state.state == "on"
assert state.attributes["brightness"] == 53.55
assert sys_info["brightness"] == 21

sys_info["relay_state"] = 0
sys_info["brightness"] = 66

await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.dimmer1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")

state = hass.states.get("light.dimmer1")
assert state.state == "off"

await hass.services.async_call(
LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "light.dimmer1"}, blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")

state = hass.states.get("light.dimmer1")
assert state.state == "on"
assert state.attributes["brightness"] == 168.3
assert sys_info["brightness"] == 66


async def test_light(hass: HomeAssistant, light_mock_data: LightMockData) -> None:
"""Test function."""
light_state = light_mock_data.light_state
@@ -310,7 +482,7 @@ async def test_get_light_state_retry(
if set_state_call_count == 1:
raise SmartDeviceException()

light_mock_data.set_light_state(state_data)
return light_mock_data.set_light_state(state_data)

light_mock_data.set_light_state_mock.side_effect = set_light_state_side_effect


@@ -1,6 +1,5 @@
"""The tests for the TTS component."""
import ctypes
import os
from unittest.mock import PropertyMock, patch

import pytest
@@ -121,6 +120,7 @@ async def test_setup_component_and_test_service(hass, empty_cache_dir):
] == "{}/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_demo.mp3".format(
hass.config.api.base_url
)
await hass.async_block_till_done()
assert (
empty_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_demo.mp3"
).is_file()
@@ -153,6 +153,7 @@ async def test_setup_component_and_test_service_with_config_language(
] == "{}/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_de_-_demo.mp3".format(
hass.config.api.base_url
)
await hass.async_block_till_done()
assert (
empty_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_de_-_demo.mp3"
).is_file()
@@ -194,6 +195,7 @@ async def test_setup_component_and_test_service_with_service_language(
] == "{}/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_de_-_demo.mp3".format(
hass.config.api.base_url
)
await hass.async_block_till_done()
assert (
empty_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_de_-_demo.mp3"
).is_file()
@@ -221,6 +223,7 @@ async def test_setup_component_test_service_with_wrong_service_language(
blocking=True,
)
assert len(calls) == 0
await hass.async_block_till_done()
assert not (
empty_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_lang_-_demo.mp3"
).is_file()
@@ -257,6 +260,7 @@ async def test_setup_component_and_test_service_with_service_options(
] == "{}/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_de_{}_demo.mp3".format(
hass.config.api.base_url, opt_hash
)
await hass.async_block_till_done()
assert (
empty_cache_dir
/ f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de_{opt_hash}_demo.mp3"
@@ -294,14 +298,11 @@ async def test_setup_component_and_test_with_service_options_def(hass, empty_cac
] == "{}/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_de_{}_demo.mp3".format(
hass.config.api.base_url, opt_hash
)
assert os.path.isfile(
os.path.join(
empty_cache_dir,
"42f18378fd4393d18c8dd11d03fa9563c1e54491_de_{0}_demo.mp3".format(
opt_hash
),
)
)
await hass.async_block_till_done()
assert (
empty_cache_dir
/ f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de_{opt_hash}_demo.mp3"
).is_file()


async def test_setup_component_and_test_service_with_service_options_wrong(
@@ -329,6 +330,7 @@ async def test_setup_component_and_test_service_with_service_options_wrong(
opt_hash = ctypes.c_size_t(hash(frozenset({"speed": 1}))).value

assert len(calls) == 0
await hass.async_block_till_done()
assert not (
empty_cache_dir
/ f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de_{opt_hash}_demo.mp3"
@@ -383,6 +385,7 @@ async def test_setup_component_and_test_service_clear_cache(hass, empty_cache_di
# To make sure the file is persisted
await hass.async_block_till_done()
assert len(calls) == 1
await hass.async_block_till_done()
assert (
empty_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_demo.mp3"
).is_file()
@@ -391,6 +394,7 @@ async def test_setup_component_and_test_service_clear_cache(hass, empty_cache_di
tts.DOMAIN, tts.SERVICE_CLEAR_CACHE, {}, blocking=True
)

await hass.async_block_till_done()
assert not (
empty_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_demo.mp3"
).is_file()
@@ -520,6 +524,7 @@ async def test_setup_component_test_without_cache(hass, empty_cache_dir):
blocking=True,
)
assert len(calls) == 1
await hass.async_block_till_done()
assert not (
empty_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_demo.mp3"
).is_file()
@@ -547,6 +552,7 @@ async def test_setup_component_test_with_cache_call_service_without_cache(
blocking=True,
)
assert len(calls) == 1
await hass.async_block_till_done()
assert not (
empty_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_demo.mp3"
).is_file()

@@ -9,6 +9,7 @@ from homeassistant.components.media_player import DEVICE_CLASS_SPEAKER, DEVICE_C
from homeassistant.components.vizio.config_flow import _get_config_schema
from homeassistant.components.vizio.const import (
CONF_APPS,
CONF_APPS_TO_INCLUDE_OR_EXCLUDE,
CONF_INCLUDE,
CONF_VOLUME_STEP,
DEFAULT_NAME,
@@ -176,6 +177,39 @@ async def test_tv_options_flow_with_apps(hass: HomeAssistantType) -> None:
assert result["data"][CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]}


async def test_tv_options_flow_start_with_volume(hass: HomeAssistantType) -> None:
"""Test options config flow for TV with providing apps option after providing volume step in initial config."""
entry = MockConfigEntry(
domain=DOMAIN,
data=MOCK_USER_VALID_TV_CONFIG,
options={CONF_VOLUME_STEP: VOLUME_STEP},
)
entry.add_to_hass(hass)

assert entry.options
assert entry.options == {CONF_VOLUME_STEP: VOLUME_STEP}
assert CONF_APPS not in entry.options
assert CONF_APPS_TO_INCLUDE_OR_EXCLUDE not in entry.options

result = await hass.config_entries.options.async_init(entry.entry_id, data=None)

assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"

options = {CONF_VOLUME_STEP: VOLUME_STEP}
options.update(MOCK_INCLUDE_APPS)

result = await hass.config_entries.options.async_configure(
result["flow_id"], user_input=options
)

assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == ""
assert result["data"][CONF_VOLUME_STEP] == VOLUME_STEP
assert CONF_APPS in result["data"]
assert result["data"][CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]}


async def test_user_host_already_configured(
hass: HomeAssistantType,
vizio_connect: pytest.fixture,

@@ -539,3 +539,21 @@ async def async_test_zha_group_light_entity(
await zha_group.async_add_members([device_light_3.ieee])
await dev3_cluster_on_off.on()
assert hass.states.get(entity_id).state == STATE_ON

# make the group have only 1 member and now there should be no entity
await zha_group.async_remove_members([device_light_2.ieee, device_light_3.ieee])
assert len(zha_group.members) == 1
assert hass.states.get(entity_id).state is None
# make sure the entity registry entry is still there
assert zha_gateway.ha_entity_registry.async_get(entity_id) is not None

# add a member back and ensure that the group entity was created again
await zha_group.async_add_members([device_light_3.ieee])
await dev3_cluster_on_off.on()
assert hass.states.get(entity_id).state == STATE_ON

# remove the group and ensure that there is no entity and that the entity registry is cleaned up
assert zha_gateway.ha_entity_registry.async_get(entity_id) is not None
await zha_gateway.async_remove_zigpy_group(zha_group.group_id)
assert hass.states.get(entity_id).state is None
assert zha_gateway.ha_entity_registry.async_get(entity_id) is None

@@ -100,13 +100,23 @@ def test_dimmer_turn_on(mock_openzwave):

node.reset_mock()

device.turn_on(**{ATTR_BRIGHTNESS: 224})

assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]

assert value_id == value.value_id
assert brightness == 87  # round(224 / 255 * 99)

node.reset_mock()

device.turn_on(**{ATTR_BRIGHTNESS: 120})

assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]

assert value_id == value.value_id
assert brightness == 46  # int(120 / 255 * 99)
assert brightness == 47  # round(120 / 255 * 99)

with patch.object(light, "_LOGGER", MagicMock()) as mock_logger:
device.turn_on(**{ATTR_TRANSITION: 35})
BIN tests/fixtures/ipp/get-printer-attributes-error-0x0503.bin vendored Normal file
Binary file not shown.
47 tests/fixtures/nut/BACKUPSES600M1.json vendored Normal file
@@ -0,0 +1,47 @@
{
"ups.realpower.nominal" : "330",
"input.voltage" : "123.0",
"ups.mfr" : "American Power Conversion",
"driver.version" : "2.7.4",
"ups.test.result" : "No test initiated",
"input.voltage.nominal" : "120",
"input.transfer.low" : "92",
"driver.parameter.pollinterval" : "15",
"driver.version.data" : "APC HID 0.96",
"driver.parameter.pollfreq" : "30",
"battery.mfr.date" : "2017/04/01",
"ups.beeper.status" : "enabled",
"battery.date" : "2001/09/25",
"driver.name" : "usbhid-ups",
"battery.charge" : "100",
"ups.status" : "OL",
"ups.model" : "Back-UPS ES 600M1",
"battery.runtime.low" : "120",
"ups.firmware" : "928.a5 .D",
"ups.delay.shutdown" : "20",
"device.model" : "Back-UPS ES 600M1",
"device.serial" : "4B1713P32195 ",
"input.sensitivity" : "medium",
"ups.firmware.aux" : "a5 ",
"input.transfer.reason" : "input voltage out of range",
"ups.timer.reboot" : "0",
"battery.voltage.nominal" : "12.0",
"ups.vendorid" : "051d",
"input.transfer.high" : "139",
"battery.voltage" : "13.7",
"battery.charge.low" : "10",
"battery.type" : "PbAc",
"ups.mfr.date" : "2017/04/01",
"ups.timer.shutdown" : "-1",
"device.mfr" : "American Power Conversion",
"driver.parameter.port" : "auto",
"battery.charge.warning" : "50",
"device.type" : "ups",
"driver.parameter.vendorid" : "051d",
"ups.serial" : "4B1713P32195 ",
"ups.load" : "22",
"driver.version.internal" : "0.41",
"battery.runtime" : "1968",
"driver.parameter.synchronous" : "no",
"ups.productid" : "0002"
}
43 tests/fixtures/nut/CP1500PFCLCD.json vendored Normal file
@@ -0,0 +1,43 @@
{
"battery.runtime.low" : "300",
"driver.parameter.port" : "auto",
"ups.delay.shutdown" : "20",
"driver.parameter.pollfreq" : "30",
"ups.beeper.status" : "disabled",
"input.voltage.nominal" : "120",
"device.serial" : "000000000000",
"ups.timer.shutdown" : "-60",
"input.voltage" : "122.0",
"ups.status" : "OL",
"ups.model" : "CP1500PFCLCD",
"device.mfr" : "CPS",
"device.model" : "CP1500PFCLCD",
"input.transfer.low" : "88",
"battery.mfr.date" : "CPS",
"driver.version" : "2.7.4",
"driver.version.data" : "CyberPower HID 0.4",
"driver.parameter.synchronous" : "no",
"ups.realpower.nominal" : "900",
"ups.productid" : "0501",
"ups.mfr" : "CPS",
"ups.vendorid" : "0764",
"driver.version.internal" : "0.41",
"output.voltage" : "138.0",
"battery.runtime" : "10530",
"device.type" : "ups",
"battery.charge.low" : "10",
"ups.timer.start" : "-60",
"driver.parameter.pollinterval" : "15",
"ups.load" : "0",
"ups.serial" : "000000000000",
"input.transfer.high" : "139",
"battery.charge.warning" : "20",
"battery.voltage.nominal" : "24",
"driver.parameter.vendorid" : "0764",
"driver.name" : "usbhid-ups",
"battery.type" : "PbAcid",
"ups.delay.start" : "30",
"battery.voltage" : "24.0",
"battery.charge" : "100",
"ups.test.result" : "No test initiated"
}