Merge remote-tracking branch 'refs/remotes/home-assistant/dev' into dev
@@ -442,7 +442,6 @@ omit =
homeassistant/components/mychevy/*
homeassistant/components/mycroft/*
homeassistant/components/mycroft/notify.py
homeassistant/components/myq/cover.py
homeassistant/components/mysensors/*
homeassistant/components/mystrom/binary_sensor.py
homeassistant/components/mystrom/light.py
@@ -2,7 +2,7 @@
"domain": "apprise",
"name": "Apprise",
"documentation": "https://www.home-assistant.io/integrations/apprise",
"requirements": ["apprise==0.8.4"],
"requirements": ["apprise==0.8.5"],
"dependencies": [],
"codeowners": ["@caronc"]
}
@@ -1,6 +1,5 @@
|
||||
"""Use Bayesian Inference to trigger a binary sensor."""
|
||||
from collections import OrderedDict
|
||||
from itertools import chain
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -88,10 +87,10 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
def update_probability(prior, prob_true, prob_false):
|
||||
def update_probability(prior, prob_given_true, prob_given_false):
|
||||
"""Update probability using Bayes' rule."""
|
||||
numerator = prob_true * prior
|
||||
denominator = numerator + prob_false * (1 - prior)
|
||||
numerator = prob_given_true * prior
|
||||
denominator = numerator + prob_given_false * (1 - prior)
|
||||
probability = numerator / denominator
|
||||
return probability
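For reference, a minimal sketch of how this update composes over several observations (the numbers below are illustrative only, not from any real configuration); this is the same fold that _calculate_new_probability performs further down:

def update_probability(prior, prob_given_true, prob_given_false):
    numerator = prob_given_true * prior
    denominator = numerator + prob_given_false * (1 - prior)
    return numerator / denominator

# Two made-up observations, applied one after another to a prior of 0.3.
observations = [
    {"prob_given_true": 0.9, "prob_given_false": 0.2},
    {"prob_given_true": 0.7, "prob_given_false": 0.4},
]
prior = 0.3
for obs in observations:
    prior = update_probability(prior, obs["prob_given_true"], obs["prob_given_false"])
print(round(prior, 3))  # 0.771, the posterior the sensor would compare against its threshold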
|
||||
|
||||
@@ -127,84 +126,124 @@ class BayesianBinarySensor(BinarySensorDevice):
|
||||
self.prior = prior
|
||||
self.probability = prior
|
||||
|
||||
self.current_obs = OrderedDict({})
|
||||
self.entity_obs_dict = []
|
||||
self.current_observations = OrderedDict({})
|
||||
|
||||
for obs in self._observations:
|
||||
if "entity_id" in obs:
|
||||
self.entity_obs_dict.append([obs.get("entity_id")])
|
||||
if "value_template" in obs:
|
||||
self.entity_obs_dict.append(
|
||||
list(obs.get(CONF_VALUE_TEMPLATE).extract_entities())
|
||||
)
|
||||
self.observations_by_entity = self._build_observations_by_entity()
|
||||
|
||||
to_observe = set()
|
||||
for obs in self._observations:
|
||||
if "entity_id" in obs:
|
||||
to_observe.update(set([obs.get("entity_id")]))
|
||||
if "value_template" in obs:
|
||||
to_observe.update(set(obs.get(CONF_VALUE_TEMPLATE).extract_entities()))
|
||||
self.entity_obs = {key: [] for key in to_observe}
|
||||
|
||||
for ind, obs in enumerate(self._observations):
|
||||
obs["id"] = ind
|
||||
if "entity_id" in obs:
|
||||
self.entity_obs[obs["entity_id"]].append(obs)
|
||||
if "value_template" in obs:
|
||||
for ent in obs.get(CONF_VALUE_TEMPLATE).extract_entities():
|
||||
self.entity_obs[ent].append(obs)
|
||||
|
||||
self.watchers = {
|
||||
self.observation_handlers = {
|
||||
"numeric_state": self._process_numeric_state,
|
||||
"state": self._process_state,
|
||||
"template": self._process_template,
|
||||
}
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Call when entity about to be added."""
|
||||
"""
|
||||
Call when entity about to be added.
|
||||
|
||||
All relevant update logic for instance attributes occurs within this closure.
|
||||
Other methods in this class are designed to avoid directly modifying instance
|
||||
attributes, by instead focusing on returning relevant data back to this method.
|
||||
|
||||
The goal of this method is to ensure that `self.current_observations` and `self.probability`
are set on a best-effort basis when this entity is registered with hass.
|
||||
|
||||
In addition, this method must register the state listener defined within, which
|
||||
will be called any time a relevant entity changes its state.
|
||||
"""
|
||||
|
||||
@callback
|
||||
def async_threshold_sensor_state_listener(entity, old_state, new_state):
|
||||
"""Handle sensor state changes."""
|
||||
def async_threshold_sensor_state_listener(entity, _old_state, new_state):
|
||||
"""
|
||||
Handle sensor state changes.
|
||||
|
||||
When a state changes, we must update our list of current observations,
|
||||
then calculate the new probability.
|
||||
"""
|
||||
if new_state.state == STATE_UNKNOWN:
|
||||
return
|
||||
|
||||
entity_obs_list = self.entity_obs[entity]
|
||||
|
||||
for entity_obs in entity_obs_list:
|
||||
platform = entity_obs["platform"]
|
||||
|
||||
self.watchers[platform](entity_obs)
|
||||
|
||||
prior = self.prior
|
||||
for obs in self.current_obs.values():
|
||||
prior = update_probability(prior, obs["prob_true"], obs["prob_false"])
|
||||
self.probability = prior
|
||||
self.current_observations.update(self._record_entity_observations(entity))
|
||||
self.probability = self._calculate_new_probability()
|
||||
|
||||
self.hass.async_add_job(self.async_update_ha_state, True)
|
||||
|
||||
self.current_observations.update(self._initialize_current_observations())
|
||||
self.probability = self._calculate_new_probability()
|
||||
async_track_state_change(
|
||||
self.hass, self.entity_obs, async_threshold_sensor_state_listener
|
||||
self.hass,
|
||||
self.observations_by_entity,
|
||||
async_threshold_sensor_state_listener,
|
||||
)
|
||||
|
||||
def _update_current_obs(self, entity_observation, should_trigger):
|
||||
"""Update current observation."""
|
||||
obs_id = entity_observation["id"]
|
||||
def _initialize_current_observations(self):
|
||||
local_observations = OrderedDict({})
|
||||
for entity in self.observations_by_entity:
|
||||
local_observations.update(self._record_entity_observations(entity))
|
||||
return local_observations
|
||||
|
||||
if should_trigger:
|
||||
prob_true = entity_observation["prob_given_true"]
|
||||
prob_false = entity_observation.get("prob_given_false", 1 - prob_true)
|
||||
def _record_entity_observations(self, entity):
|
||||
local_observations = OrderedDict({})
|
||||
entity_obs_list = self.observations_by_entity[entity]
|
||||
|
||||
self.current_obs[obs_id] = {
|
||||
"prob_true": prob_true,
|
||||
"prob_false": prob_false,
|
||||
}
|
||||
for entity_obs in entity_obs_list:
|
||||
platform = entity_obs["platform"]
|
||||
|
||||
else:
|
||||
self.current_obs.pop(obs_id, None)
|
||||
should_trigger = self.observation_handlers[platform](entity_obs)
|
||||
|
||||
if should_trigger:
|
||||
obs_entry = {"entity_id": entity, **entity_obs}
|
||||
else:
|
||||
obs_entry = None
|
||||
|
||||
local_observations[entity_obs["id"]] = obs_entry
|
||||
|
||||
return local_observations
|
||||
|
||||
def _calculate_new_probability(self):
|
||||
prior = self.prior
|
||||
|
||||
for obs in self.current_observations.values():
|
||||
if obs is not None:
|
||||
prior = update_probability(
|
||||
prior,
|
||||
obs["prob_given_true"],
|
||||
obs.get("prob_given_false", 1 - obs["prob_given_true"]),
|
||||
)
|
||||
|
||||
return prior
|
||||
|
||||
def _build_observations_by_entity(self):
|
||||
"""
|
||||
Build and return data structure of the form below.
|
||||
|
||||
{
|
||||
"sensor.sensor1": [{"id": 0, ...}, {"id": 1, ...}],
|
||||
"sensor.sensor2": [{"id": 2, ...}],
|
||||
...
|
||||
}
|
||||
|
||||
Each "observation" must be recognized uniquely, and it should be possible
|
||||
for all relevant observations to be looked up via their `entity_id`.
|
||||
"""
|
||||
|
||||
observations_by_entity = {}
|
||||
for ind, obs in enumerate(self._observations):
|
||||
obs["id"] = ind
|
||||
|
||||
if "entity_id" in obs:
|
||||
entity_ids = [obs["entity_id"]]
|
||||
elif "value_template" in obs:
|
||||
entity_ids = obs.get(CONF_VALUE_TEMPLATE).extract_entities()
|
||||
|
||||
for e_id in entity_ids:
|
||||
obs_list = observations_by_entity.get(e_id, [])
|
||||
obs_list.append(obs)
|
||||
observations_by_entity[e_id] = obs_list
|
||||
|
||||
return observations_by_entity
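To make the docstring's shape concrete, here is a hypothetical result for two configured observations; the entity ids, probabilities, and indices are invented for illustration, and observation 1 uses a value_template that references both entities, so the same dict is appended under both keys:

observations_by_entity = {
    "sensor.kitchen_motion": [
        {"id": 0, "platform": "state", "entity_id": "sensor.kitchen_motion",
         "to_state": "on", "prob_given_true": 0.8},
        {"id": 1, "platform": "template", "prob_given_true": 0.6},
    ],
    "sensor.hallway_motion": [
        {"id": 1, "platform": "template", "prob_given_true": 0.6},
    ],
}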
|
||||
|
||||
def _process_numeric_state(self, entity_observation):
|
||||
"""Add entity to current_obs if numeric state conditions are met."""
|
||||
"""Return True if numeric condition is met."""
|
||||
entity = entity_observation["entity_id"]
|
||||
|
||||
should_trigger = condition.async_numeric_state(
|
||||
@@ -215,27 +254,26 @@ class BayesianBinarySensor(BinarySensorDevice):
|
||||
None,
|
||||
entity_observation,
|
||||
)
|
||||
|
||||
self._update_current_obs(entity_observation, should_trigger)
|
||||
return should_trigger
|
||||
|
||||
def _process_state(self, entity_observation):
|
||||
"""Add entity to current observations if state conditions are met."""
|
||||
"""Return True if state conditions are met."""
|
||||
entity = entity_observation["entity_id"]
|
||||
|
||||
should_trigger = condition.state(
|
||||
self.hass, entity, entity_observation.get("to_state")
|
||||
)
|
||||
|
||||
self._update_current_obs(entity_observation, should_trigger)
|
||||
return should_trigger
|
||||
|
||||
def _process_template(self, entity_observation):
|
||||
"""Add entity to current_obs if template is true."""
|
||||
"""Return True if template condition is True."""
|
||||
template = entity_observation.get(CONF_VALUE_TEMPLATE)
|
||||
template.hass = self.hass
|
||||
should_trigger = condition.async_template(
|
||||
self.hass, template, entity_observation
|
||||
)
|
||||
self._update_current_obs(entity_observation, should_trigger)
|
||||
return should_trigger
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -260,13 +298,15 @@ class BayesianBinarySensor(BinarySensorDevice):
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the sensor."""
|
||||
return {
|
||||
ATTR_OBSERVATIONS: list(self.current_obs.values()),
|
||||
ATTR_OBSERVATIONS: list(self.current_observations.values()),
|
||||
ATTR_OCCURRED_OBSERVATION_ENTITIES: list(
|
||||
set(
|
||||
chain.from_iterable(
|
||||
self.entity_obs_dict[obs] for obs in self.current_obs.keys()
|
||||
)
|
||||
obs.get("entity_id")
|
||||
for obs in self.current_observations.values()
|
||||
if obs is not None
|
||||
)
|
||||
),
|
||||
ATTR_PROBABILITY: round(self.probability, 2),
|
||||
|
@@ -86,9 +86,10 @@ class DeconzDevice(DeconzBase, Entity):
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Disconnect device object when removed."""
|
||||
self._device.remove_callback(self.async_update_callback)
|
||||
del self.gateway.deconz_ids[self.entity_id]
|
||||
for unsub_dispatcher in self.listeners:
|
||||
unsub_dispatcher()
|
||||
if self.entity_id in self.gateway.deconz_ids:
|
||||
del self.gateway.deconz_ids[self.entity_id]
|
||||
for unsub_dispatcher in self.listeners:
|
||||
unsub_dispatcher()
|
||||
|
||||
async def async_remove_self(self, deconz_ids: list) -> None:
|
||||
"""Schedule removal of this entity.
|
||||
|
@@ -31,7 +31,7 @@ from .errors import AuthenticationRequired, CannotConnect
|
||||
@callback
|
||||
def get_gateway_from_config_entry(hass, config_entry):
|
||||
"""Return gateway with a matching bridge id."""
|
||||
return hass.data[DOMAIN][config_entry.unique_id]
|
||||
return hass.data[DOMAIN].get(config_entry.unique_id)
|
||||
|
||||
|
||||
class DeconzGateway:
|
||||
@@ -126,6 +126,8 @@ class DeconzGateway:
|
||||
Causes for this are either discovery updating the host address or config entry options changing.
|
||||
"""
|
||||
gateway = get_gateway_from_config_entry(hass, entry)
|
||||
if not gateway:
|
||||
return
|
||||
if gateway.api.host != entry.data[CONF_HOST]:
|
||||
gateway.api.close()
|
||||
gateway.api.host = entry.data[CONF_HOST]
|
||||
|
@@ -1,19 +1,27 @@
|
||||
"""The DirecTV integration."""
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
from typing import Dict
|
||||
from typing import Any, Dict
|
||||
|
||||
from DirectPy import DIRECTV
|
||||
from requests.exceptions import RequestException
|
||||
from directv import DIRECTV, DIRECTVError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.const import ATTR_NAME, CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import DATA_CLIENT, DATA_LOCATIONS, DATA_VERSION_INFO, DEFAULT_PORT, DOMAIN
|
||||
from .const import (
|
||||
ATTR_IDENTIFIERS,
|
||||
ATTR_MANUFACTURER,
|
||||
ATTR_MODEL,
|
||||
ATTR_SOFTWARE_VERSION,
|
||||
ATTR_VIA_DEVICE,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -28,21 +36,6 @@ PLATFORMS = ["media_player"]
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
def get_dtv_data(
|
||||
hass: HomeAssistant, host: str, port: int = DEFAULT_PORT, client_addr: str = "0"
|
||||
) -> dict:
|
||||
"""Retrieve a DIRECTV instance, locations list, and version info for the receiver device."""
|
||||
dtv = DIRECTV(host, port, client_addr, determine_state=False)
|
||||
locations = dtv.get_locations()
|
||||
version_info = dtv.get_version()
|
||||
|
||||
return {
|
||||
DATA_CLIENT: dtv,
|
||||
DATA_LOCATIONS: locations,
|
||||
DATA_VERSION_INFO: version_info,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
|
||||
"""Set up the DirecTV component."""
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
@@ -60,14 +53,14 @@ async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up DirecTV from a config entry."""
|
||||
dtv = DIRECTV(entry.data[CONF_HOST], session=async_get_clientsession(hass))
|
||||
|
||||
try:
|
||||
dtv_data = await hass.async_add_executor_job(
|
||||
get_dtv_data, hass, entry.data[CONF_HOST]
|
||||
)
|
||||
except RequestException:
|
||||
await dtv.update()
|
||||
except DIRECTVError:
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
hass.data[DOMAIN][entry.entry_id] = dtv_data
|
||||
hass.data[DOMAIN][entry.entry_id] = dtv
|
||||
|
||||
for component in PLATFORMS:
|
||||
hass.async_create_task(
|
||||
@@ -92,3 +85,32 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
||||
return unload_ok
|
||||
|
||||
|
||||
class DIRECTVEntity(Entity):
|
||||
"""Defines a base DirecTV entity."""
|
||||
|
||||
def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None:
|
||||
"""Initialize the DirecTV entity."""
|
||||
self._address = address
|
||||
self._device_id = address if address != "0" else dtv.device.info.receiver_id
|
||||
self._is_client = address != "0"
|
||||
self._name = name
|
||||
self.dtv = dtv
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the entity."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def device_info(self) -> Dict[str, Any]:
|
||||
"""Return device information about this DirecTV receiver."""
|
||||
return {
|
||||
ATTR_IDENTIFIERS: {(DOMAIN, self._device_id)},
|
||||
ATTR_NAME: self.name,
|
||||
ATTR_MANUFACTURER: self.dtv.device.info.brand,
|
||||
ATTR_MODEL: None,
|
||||
ATTR_SOFTWARE_VERSION: self.dtv.device.info.version,
|
||||
ATTR_VIA_DEVICE: (DOMAIN, self.dtv.device.info.receiver_id),
|
||||
}
|
||||
|
@@ -3,18 +3,20 @@ import logging
|
||||
from typing import Any, Dict, Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from DirectPy import DIRECTV
|
||||
from requests.exceptions import RequestException
|
||||
from directv import DIRECTV, DIRECTVError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.ssdp import ATTR_SSDP_LOCATION, ATTR_UPNP_SERIAL
|
||||
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow
|
||||
from homeassistant.const import CONF_HOST, CONF_NAME
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.typing import DiscoveryInfoType
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import (
|
||||
ConfigType,
|
||||
DiscoveryInfoType,
|
||||
HomeAssistantType,
|
||||
)
|
||||
|
||||
from .const import DEFAULT_PORT
|
||||
from .const import CONF_RECEIVER_ID
|
||||
from .const import DOMAIN # pylint: disable=unused-import
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -22,22 +24,17 @@ _LOGGER = logging.getLogger(__name__)
|
||||
ERROR_CANNOT_CONNECT = "cannot_connect"
|
||||
ERROR_UNKNOWN = "unknown"
|
||||
|
||||
DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str})
|
||||
|
||||
|
||||
def validate_input(data: Dict) -> Dict:
|
||||
async def validate_input(hass: HomeAssistantType, data: dict) -> Dict[str, Any]:
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
dtv = DIRECTV(data["host"], DEFAULT_PORT, determine_state=False)
|
||||
version_info = dtv.get_version()
|
||||
session = async_get_clientsession(hass)
|
||||
directv = DIRECTV(data[CONF_HOST], session=session)
|
||||
device = await directv.update()
|
||||
|
||||
return {
|
||||
"title": data["host"],
|
||||
"host": data["host"],
|
||||
"receiver_id": "".join(version_info["receiverId"].split()),
|
||||
}
|
||||
return {CONF_RECEIVER_ID: device.info.receiver_id}
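The same client calls are what async_setup_entry relies on; a standalone sketch of probing a receiver with the new directv package, mirroring validate_input above (the host address in the usage comment is a placeholder):

import asyncio
from aiohttp import ClientSession
from directv import DIRECTV, DIRECTVError

async def probe(host: str) -> str:
    # Connect, refresh device info, and hand back the receiver id used as the unique_id.
    async with ClientSession() as session:
        try:
            device = await DIRECTV(host, session=session).update()
        except DIRECTVError:
            # Unreachable box; the config flow maps this to "cannot_connect".
            raise
        return device.info.receiver_id

# asyncio.run(probe("10.0.0.10"))  # placeholder address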
|
||||
|
||||
|
||||
class DirecTVConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@@ -46,84 +43,91 @@ class DirecTVConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
VERSION = 1
|
||||
CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
|
||||
|
||||
@callback
|
||||
def _show_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
|
||||
"""Show the form to the user."""
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors or {},
|
||||
)
|
||||
def __init__(self):
|
||||
"""Set up the instance."""
|
||||
self.discovery_info = {}
|
||||
|
||||
async def async_step_import(
|
||||
self, user_input: Optional[Dict] = None
|
||||
self, user_input: Optional[ConfigType] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle a flow initialized by yaml file."""
|
||||
"""Handle a flow initiated by configuration file."""
|
||||
return await self.async_step_user(user_input)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: Optional[Dict] = None
|
||||
self, user_input: Optional[ConfigType] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle a flow initialized by user."""
|
||||
if not user_input:
|
||||
return self._show_form()
|
||||
|
||||
errors = {}
|
||||
"""Handle a flow initiated by the user."""
|
||||
if user_input is None:
|
||||
return self._show_setup_form()
|
||||
|
||||
try:
|
||||
info = await self.hass.async_add_executor_job(validate_input, user_input)
|
||||
user_input[CONF_HOST] = info[CONF_HOST]
|
||||
except RequestException:
|
||||
errors["base"] = ERROR_CANNOT_CONNECT
|
||||
return self._show_form(errors)
|
||||
info = await validate_input(self.hass, user_input)
|
||||
except DIRECTVError:
|
||||
return self._show_setup_form({"base": ERROR_CANNOT_CONNECT})
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason=ERROR_UNKNOWN)
|
||||
|
||||
await self.async_set_unique_id(info["receiver_id"])
|
||||
self._abort_if_unique_id_configured()
|
||||
user_input[CONF_RECEIVER_ID] = info[CONF_RECEIVER_ID]
|
||||
|
||||
return self.async_create_entry(title=info["title"], data=user_input)
|
||||
await self.async_set_unique_id(user_input[CONF_RECEIVER_ID])
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]})
|
||||
|
||||
return self.async_create_entry(title=user_input[CONF_HOST], data=user_input)
|
||||
|
||||
async def async_step_ssdp(
|
||||
self, discovery_info: Optional[DiscoveryInfoType] = None
|
||||
self, discovery_info: DiscoveryInfoType
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle a flow initialized by discovery."""
|
||||
"""Handle SSDP discovery."""
|
||||
host = urlparse(discovery_info[ATTR_SSDP_LOCATION]).hostname
|
||||
receiver_id = discovery_info[ATTR_UPNP_SERIAL][4:] # strips off RID-
|
||||
receiver_id = None
|
||||
|
||||
await self.async_set_unique_id(receiver_id)
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: host})
|
||||
if discovery_info.get(ATTR_UPNP_SERIAL):
|
||||
receiver_id = discovery_info[ATTR_UPNP_SERIAL][4:] # strips off RID-
|
||||
|
||||
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
|
||||
self.context.update(
|
||||
{CONF_HOST: host, CONF_NAME: host, "title_placeholders": {"name": host}}
|
||||
self.context.update({"title_placeholders": {"name": host}})
|
||||
|
||||
self.discovery_info.update(
|
||||
{CONF_HOST: host, CONF_NAME: host, CONF_RECEIVER_ID: receiver_id}
|
||||
)
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, self.discovery_info)
|
||||
except DIRECTVError:
|
||||
return self.async_abort(reason=ERROR_CANNOT_CONNECT)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason=ERROR_UNKNOWN)
|
||||
|
||||
self.discovery_info[CONF_RECEIVER_ID] = info[CONF_RECEIVER_ID]
|
||||
|
||||
await self.async_set_unique_id(self.discovery_info[CONF_RECEIVER_ID])
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={CONF_HOST: self.discovery_info[CONF_HOST]}
|
||||
)
|
||||
|
||||
return await self.async_step_ssdp_confirm()
|
||||
|
||||
async def async_step_ssdp_confirm(
|
||||
self, user_input: Optional[Dict] = None
|
||||
self, user_input: ConfigType = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle user-confirmation of discovered device."""
|
||||
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
|
||||
name = self.context.get(CONF_NAME)
|
||||
"""Handle a confirmation flow initiated by SSDP."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="ssdp_confirm",
|
||||
description_placeholders={"name": self.discovery_info[CONF_NAME]},
|
||||
errors={},
|
||||
)
|
||||
|
||||
if user_input is not None:
|
||||
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
|
||||
user_input[CONF_HOST] = self.context.get(CONF_HOST)
|
||||
|
||||
try:
|
||||
await self.hass.async_add_executor_job(validate_input, user_input)
|
||||
return self.async_create_entry(title=name, data=user_input)
|
||||
except (OSError, RequestException):
|
||||
return self.async_abort(reason=ERROR_CANNOT_CONNECT)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason=ERROR_UNKNOWN)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="ssdp_confirm", description_placeholders={"name": name},
|
||||
return self.async_create_entry(
|
||||
title=self.discovery_info[CONF_NAME], data=self.discovery_info,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
def _show_setup_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
|
||||
"""Show the setup form to the user."""
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
|
||||
errors=errors or {},
|
||||
)
|
||||
|
@@ -2,19 +2,19 @@
|
||||
|
||||
DOMAIN = "directv"
|
||||
|
||||
# Attributes
|
||||
ATTR_IDENTIFIERS = "identifiers"
|
||||
ATTR_MANUFACTURER = "manufacturer"
|
||||
ATTR_MEDIA_CURRENTLY_RECORDING = "media_currently_recording"
|
||||
ATTR_MEDIA_RATING = "media_rating"
|
||||
ATTR_MEDIA_RECORDED = "media_recorded"
|
||||
ATTR_MEDIA_START_TIME = "media_start_time"
|
||||
ATTR_MODEL = "model"
|
||||
ATTR_SOFTWARE_VERSION = "sw_version"
|
||||
ATTR_VIA_DEVICE = "via_device"
|
||||
|
||||
DATA_CLIENT = "client"
|
||||
DATA_LOCATIONS = "locations"
|
||||
DATA_VERSION_INFO = "version_info"
|
||||
CONF_RECEIVER_ID = "receiver_id"
|
||||
|
||||
DEFAULT_DEVICE = "0"
|
||||
DEFAULT_MANUFACTURER = "DirecTV"
|
||||
DEFAULT_NAME = "DirecTV Receiver"
|
||||
DEFAULT_PORT = 8080
|
||||
|
||||
MODEL_HOST = "DirecTV Host"
|
||||
MODEL_CLIENT = "DirecTV Client"
|
||||
|
@@ -2,9 +2,10 @@
|
||||
"domain": "directv",
|
||||
"name": "DirecTV",
|
||||
"documentation": "https://www.home-assistant.io/integrations/directv",
|
||||
"requirements": ["directpy==0.7"],
|
||||
"requirements": ["directv==0.2.0"],
|
||||
"dependencies": [],
|
||||
"codeowners": ["@ctalkington"],
|
||||
"quality_scale": "gold",
|
||||
"config_flow": true,
|
||||
"ssdp": [
|
||||
{
|
||||
|
@@ -1,12 +1,10 @@
|
||||
"""Support for the DirecTV receivers."""
|
||||
import logging
|
||||
from typing import Callable, Dict, List, Optional
|
||||
from typing import Callable, List
|
||||
|
||||
from DirectPy import DIRECTV
|
||||
from requests.exceptions import RequestException
|
||||
import voluptuous as vol
|
||||
from directv import DIRECTV
|
||||
|
||||
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerDevice
|
||||
from homeassistant.components.media_player import MediaPlayerDevice
|
||||
from homeassistant.components.media_player.const import (
|
||||
MEDIA_TYPE_CHANNEL,
|
||||
MEDIA_TYPE_MOVIE,
|
||||
@@ -21,34 +19,17 @@ from homeassistant.components.media_player.const import (
|
||||
SUPPORT_TURN_ON,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE,
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_PORT,
|
||||
STATE_OFF,
|
||||
STATE_PAUSED,
|
||||
STATE_PLAYING,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.const import STATE_OFF, STATE_PAUSED, STATE_PLAYING
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import DIRECTVEntity
|
||||
from .const import (
|
||||
ATTR_MEDIA_CURRENTLY_RECORDING,
|
||||
ATTR_MEDIA_RATING,
|
||||
ATTR_MEDIA_RECORDED,
|
||||
ATTR_MEDIA_START_TIME,
|
||||
DATA_CLIENT,
|
||||
DATA_LOCATIONS,
|
||||
DATA_VERSION_INFO,
|
||||
DEFAULT_DEVICE,
|
||||
DEFAULT_MANUFACTURER,
|
||||
DEFAULT_NAME,
|
||||
DEFAULT_PORT,
|
||||
DOMAIN,
|
||||
MODEL_CLIENT,
|
||||
MODEL_HOST,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -73,15 +54,6 @@ SUPPORT_DTV_CLIENT = (
|
||||
| SUPPORT_PLAY
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_DEVICE, default=DEFAULT_DEVICE): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistantType,
|
||||
@@ -89,139 +61,57 @@ async def async_setup_entry(
|
||||
async_add_entities: Callable[[List, bool], None],
|
||||
) -> bool:
|
||||
"""Set up the DirecTV config entry."""
|
||||
locations = hass.data[DOMAIN][entry.entry_id][DATA_LOCATIONS]
|
||||
version_info = hass.data[DOMAIN][entry.entry_id][DATA_VERSION_INFO]
|
||||
dtv = hass.data[DOMAIN][entry.entry_id]
|
||||
entities = []
|
||||
|
||||
for loc in locations["locations"]:
|
||||
if "locationName" not in loc or "clientAddr" not in loc:
|
||||
continue
|
||||
|
||||
if loc["clientAddr"] != "0":
|
||||
dtv = DIRECTV(
|
||||
entry.data[CONF_HOST],
|
||||
DEFAULT_PORT,
|
||||
loc["clientAddr"],
|
||||
determine_state=False,
|
||||
)
|
||||
else:
|
||||
dtv = hass.data[DOMAIN][entry.entry_id][DATA_CLIENT]
|
||||
|
||||
for location in dtv.device.locations:
|
||||
entities.append(
|
||||
DirecTvDevice(
|
||||
str.title(loc["locationName"]), loc["clientAddr"], dtv, version_info,
|
||||
DIRECTVMediaPlayer(
|
||||
dtv=dtv, name=str.title(location.name), address=location.address,
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
class DirecTvDevice(MediaPlayerDevice):
|
||||
class DIRECTVMediaPlayer(DIRECTVEntity, MediaPlayerDevice):
|
||||
"""Representation of a DirecTV receiver on the network."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
device: str,
|
||||
dtv: DIRECTV,
|
||||
version_info: Optional[Dict] = None,
|
||||
enabled_default: bool = True,
|
||||
):
|
||||
"""Initialize the device."""
|
||||
self.dtv = dtv
|
||||
self._name = name
|
||||
self._unique_id = None
|
||||
self._is_standby = True
|
||||
self._current = None
|
||||
self._last_update = None
|
||||
self._paused = None
|
||||
self._last_position = None
|
||||
self._is_recorded = None
|
||||
self._is_client = device != "0"
|
||||
def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None:
|
||||
"""Initialize DirecTV media player."""
|
||||
super().__init__(
|
||||
dtv=dtv, name=name, address=address,
|
||||
)
|
||||
|
||||
self._assumed_state = None
|
||||
self._available = False
|
||||
self._enabled_default = enabled_default
|
||||
self._first_error_timestamp = None
|
||||
self._model = None
|
||||
self._receiver_id = None
|
||||
self._software_version = None
|
||||
self._is_recorded = None
|
||||
self._is_standby = True
|
||||
self._last_position = None
|
||||
self._last_update = None
|
||||
self._paused = None
|
||||
self._program = None
|
||||
self._state = None
|
||||
|
||||
if self._is_client:
|
||||
self._model = MODEL_CLIENT
|
||||
self._unique_id = device
|
||||
|
||||
if version_info:
|
||||
self._receiver_id = "".join(version_info["receiverId"].split())
|
||||
|
||||
if not self._is_client:
|
||||
self._unique_id = self._receiver_id
|
||||
self._model = MODEL_HOST
|
||||
self._software_version = version_info["stbSoftwareVersion"]
|
||||
|
||||
def update(self):
|
||||
async def async_update(self):
|
||||
"""Retrieve latest state."""
|
||||
_LOGGER.debug("%s: Updating status", self.entity_id)
|
||||
try:
|
||||
self._available = True
|
||||
self._is_standby = self.dtv.get_standby()
|
||||
if self._is_standby:
|
||||
self._current = None
|
||||
self._is_recorded = None
|
||||
self._paused = None
|
||||
self._assumed_state = False
|
||||
self._last_position = None
|
||||
self._last_update = None
|
||||
else:
|
||||
self._current = self.dtv.get_tuned()
|
||||
if self._current["status"]["code"] == 200:
|
||||
self._first_error_timestamp = None
|
||||
self._is_recorded = self._current.get("uniqueId") is not None
|
||||
self._paused = self._last_position == self._current["offset"]
|
||||
self._assumed_state = self._is_recorded
|
||||
self._last_position = self._current["offset"]
|
||||
self._last_update = (
|
||||
dt_util.utcnow()
|
||||
if not self._paused or self._last_update is None
|
||||
else self._last_update
|
||||
)
|
||||
else:
|
||||
# If an error is received then only set to unavailable if
|
||||
# this started at least 1 minute ago.
|
||||
log_message = f"{self.entity_id}: Invalid status {self._current['status']['code']} received"
|
||||
if self._check_state_available():
|
||||
_LOGGER.debug(log_message)
|
||||
else:
|
||||
_LOGGER.error(log_message)
|
||||
self._state = await self.dtv.state(self._address)
|
||||
self._available = self._state.available
|
||||
self._is_standby = self._state.standby
|
||||
self._program = self._state.program
|
||||
|
||||
except RequestException as exception:
|
||||
_LOGGER.error(
|
||||
"%s: Request error trying to update current status: %s",
|
||||
self.entity_id,
|
||||
exception,
|
||||
)
|
||||
self._check_state_available()
|
||||
|
||||
except Exception as exception:
|
||||
_LOGGER.error(
|
||||
"%s: Exception trying to update current status: %s",
|
||||
self.entity_id,
|
||||
exception,
|
||||
)
|
||||
self._available = False
|
||||
if not self._first_error_timestamp:
|
||||
self._first_error_timestamp = dt_util.utcnow()
|
||||
raise
|
||||
|
||||
def _check_state_available(self):
|
||||
"""Set to unavailable if issue been occurring over 1 minute."""
|
||||
if not self._first_error_timestamp:
|
||||
self._first_error_timestamp = dt_util.utcnow()
|
||||
else:
|
||||
tdelta = dt_util.utcnow() - self._first_error_timestamp
|
||||
if tdelta.total_seconds() >= 60:
|
||||
self._available = False
|
||||
|
||||
return self._available
|
||||
if self._is_standby:
|
||||
self._assumed_state = False
|
||||
self._is_recorded = None
|
||||
self._last_position = None
|
||||
self._last_update = None
|
||||
self._paused = None
|
||||
elif self._program is not None:
|
||||
self._paused = self._last_position == self._program.position
|
||||
self._is_recorded = self._program.recorded
|
||||
self._last_position = self._program.position
|
||||
self._last_update = self._state.at
|
||||
self._assumed_state = self._is_recorded
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
@@ -243,24 +133,10 @@ class DirecTvDevice(MediaPlayerDevice):
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return a unique ID to use for this media player."""
|
||||
return self._unique_id
|
||||
if self._address == "0":
|
||||
return self.dtv.device.info.receiver_id
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
"""Return device specific attributes."""
|
||||
return {
|
||||
"name": self.name,
|
||||
"identifiers": {(DOMAIN, self.unique_id)},
|
||||
"manufacturer": DEFAULT_MANUFACTURER,
|
||||
"model": self._model,
|
||||
"sw_version": self._software_version,
|
||||
"via_device": (DOMAIN, self._receiver_id),
|
||||
}
|
||||
|
||||
@property
|
||||
def entity_registry_enabled_default(self) -> bool:
|
||||
"""Return if the entity should be enabled when first added to the entity registry."""
|
||||
return self._enabled_default
|
||||
return self._address
|
||||
|
||||
# MediaPlayerDevice properties and methods
|
||||
@property
|
||||
@@ -290,29 +166,30 @@ class DirecTvDevice(MediaPlayerDevice):
|
||||
@property
|
||||
def media_content_id(self):
|
||||
"""Return the content ID of current playing media."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return self._current["programId"]
|
||||
return self._program.program_id
|
||||
|
||||
@property
|
||||
def media_content_type(self):
|
||||
"""Return the content type of current playing media."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
if "episodeTitle" in self._current:
|
||||
return MEDIA_TYPE_TVSHOW
|
||||
known_types = [MEDIA_TYPE_MOVIE, MEDIA_TYPE_TVSHOW]
|
||||
if self._program.program_type in known_types:
|
||||
return self._program.program_type
|
||||
|
||||
return MEDIA_TYPE_MOVIE
|
||||
|
||||
@property
|
||||
def media_duration(self):
|
||||
"""Return the duration of current playing media in seconds."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return self._current["duration"]
|
||||
return self._program.duration
|
||||
|
||||
@property
|
||||
def media_position(self):
|
||||
@@ -324,10 +201,7 @@ class DirecTvDevice(MediaPlayerDevice):
|
||||
|
||||
@property
|
||||
def media_position_updated_at(self):
|
||||
"""When was the position of the current playing media valid.
|
||||
|
||||
Returns value from homeassistant.util.dt.utcnow().
|
||||
"""
|
||||
"""When was the position of the current playing media valid."""
|
||||
if self._is_standby:
|
||||
return None
|
||||
|
||||
@@ -336,34 +210,34 @@ class DirecTvDevice(MediaPlayerDevice):
|
||||
@property
|
||||
def media_title(self):
|
||||
"""Return the title of current playing media."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return self._current["title"]
|
||||
return self._program.title
|
||||
|
||||
@property
|
||||
def media_series_title(self):
|
||||
"""Return the title of current episode of TV show."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return self._current.get("episodeTitle")
|
||||
return self._program.episode_title
|
||||
|
||||
@property
|
||||
def media_channel(self):
|
||||
"""Return the channel current playing media."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return f"{self._current['callsign']} ({self._current['major']})"
|
||||
return f"{self._program.channel_name} ({self._program.channel})"
|
||||
|
||||
@property
|
||||
def source(self):
|
||||
"""Name of the current input source."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return self._current["major"]
|
||||
return self._program.channel
|
||||
|
||||
@property
|
||||
def supported_features(self):
|
||||
@@ -373,18 +247,18 @@ class DirecTvDevice(MediaPlayerDevice):
|
||||
@property
|
||||
def media_currently_recording(self):
|
||||
"""If the media is currently being recorded or not."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return self._current["isRecording"]
|
||||
return self._program.recording
|
||||
|
||||
@property
|
||||
def media_rating(self):
|
||||
"""TV Rating of the current playing media."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return self._current["rating"]
|
||||
return self._program.rating
|
||||
|
||||
@property
|
||||
def media_recorded(self):
|
||||
@@ -397,53 +271,53 @@ class DirecTvDevice(MediaPlayerDevice):
|
||||
@property
|
||||
def media_start_time(self):
|
||||
"""Start time the program aired."""
|
||||
if self._is_standby:
|
||||
if self._is_standby or self._program is None:
|
||||
return None
|
||||
|
||||
return dt_util.as_local(dt_util.utc_from_timestamp(self._current["startTime"]))
|
||||
return dt_util.as_local(self._program.start_time)
|
||||
|
||||
def turn_on(self):
|
||||
async def async_turn_on(self):
|
||||
"""Turn on the receiver."""
|
||||
if self._is_client:
|
||||
raise NotImplementedError()
|
||||
|
||||
_LOGGER.debug("Turn on %s", self._name)
|
||||
self.dtv.key_press("poweron")
|
||||
await self.dtv.remote("poweron", self._address)
|
||||
|
||||
def turn_off(self):
|
||||
async def async_turn_off(self):
|
||||
"""Turn off the receiver."""
|
||||
if self._is_client:
|
||||
raise NotImplementedError()
|
||||
|
||||
_LOGGER.debug("Turn off %s", self._name)
|
||||
self.dtv.key_press("poweroff")
|
||||
await self.dtv.remote("poweroff", self._address)
|
||||
|
||||
def media_play(self):
|
||||
async def async_media_play(self):
|
||||
"""Send play command."""
|
||||
_LOGGER.debug("Play on %s", self._name)
|
||||
self.dtv.key_press("play")
|
||||
await self.dtv.remote("play", self._address)
|
||||
|
||||
def media_pause(self):
|
||||
async def async_media_pause(self):
|
||||
"""Send pause command."""
|
||||
_LOGGER.debug("Pause on %s", self._name)
|
||||
self.dtv.key_press("pause")
|
||||
await self.dtv.remote("pause", self._address)
|
||||
|
||||
def media_stop(self):
|
||||
async def async_media_stop(self):
|
||||
"""Send stop command."""
|
||||
_LOGGER.debug("Stop on %s", self._name)
|
||||
self.dtv.key_press("stop")
|
||||
await self.dtv.remote("stop", self._address)
|
||||
|
||||
def media_previous_track(self):
|
||||
async def async_media_previous_track(self):
|
||||
"""Send rewind command."""
|
||||
_LOGGER.debug("Rewind on %s", self._name)
|
||||
self.dtv.key_press("rew")
|
||||
await self.dtv.remote("rew", self._address)
|
||||
|
||||
def media_next_track(self):
|
||||
async def async_media_next_track(self):
|
||||
"""Send fast forward command."""
|
||||
_LOGGER.debug("Fast forward on %s", self._name)
|
||||
self.dtv.key_press("ffwd")
|
||||
await self.dtv.remote("ffwd", self._address)
|
||||
|
||||
def play_media(self, media_type, media_id, **kwargs):
|
||||
async def async_play_media(self, media_type, media_id, **kwargs):
|
||||
"""Select input source."""
|
||||
if media_type != MEDIA_TYPE_CHANNEL:
|
||||
_LOGGER.error(
|
||||
@@ -454,4 +328,4 @@ class DirecTvDevice(MediaPlayerDevice):
|
||||
return
|
||||
|
||||
_LOGGER.debug("Changing channel on %s to %s", self._name, media_id)
|
||||
self.dtv.tune_channel(media_id)
|
||||
await self.dtv.tune(media_id, self._address)
|
||||
|
@@ -1,34 +1,36 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "This DoorBird is already configured"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Failed to connect, please try again",
|
||||
"invalid_auth": "Invalid authentication",
|
||||
"unknown": "Unexpected error"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Host (IP Address)",
|
||||
"name": "Device Name",
|
||||
"password": "Password",
|
||||
"username": "Username"
|
||||
},
|
||||
"title": "Connect to the DoorBird"
|
||||
"options" : {
|
||||
"step" : {
|
||||
"init" : {
|
||||
"data" : {
|
||||
"events" : "Comma separated list of events."
|
||||
},
|
||||
"description" : "Add an comma separated event name for each event you wish to track. After entering them here, use the DoorBird app to assign them to a specific event. See the documentation at https://www.home-assistant.io/integrations/doorbird/#events. Example: somebody_pressed_the_button, motion"
|
||||
}
|
||||
}
|
||||
},
|
||||
"config" : {
|
||||
"step" : {
|
||||
"user" : {
|
||||
"title" : "Connect to the DoorBird",
|
||||
"data" : {
|
||||
"password" : "Password",
|
||||
"host" : "Host (IP Address)",
|
||||
"name" : "Device Name",
|
||||
"username" : "Username"
|
||||
}
|
||||
},
|
||||
"title": "DoorBird"
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"events": "Comma separated list of events."
|
||||
},
|
||||
"description": "Add an comma separated event name for each event you wish to track. After entering them here, use the DoorBird app to assign them to a specific event. See the documentation at https://www.home-assistant.io/integrations/doorbird/#events. Example: somebody_pressed_the_button, motion"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort" : {
|
||||
"already_configured" : "This DoorBird is already configured",
|
||||
"link_local_address": "Link local addresses are not supported",
|
||||
"not_doorbird_device": "This device is not a DoorBird"
|
||||
},
|
||||
"title" : "DoorBird",
|
||||
"error" : {
|
||||
"invalid_auth" : "Invalid authentication",
|
||||
"unknown" : "Unexpected error",
|
||||
"cannot_connect" : "Failed to connect, please try again"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -90,6 +90,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if macaddress[:6] != DOORBIRD_OUI:
|
||||
return self.async_abort(reason="not_doorbird_device")
|
||||
if discovery_info[CONF_HOST].startswith("169.254"):
|
||||
return self.async_abort(reason="link_local_address")
|
||||
|
||||
await self.async_set_unique_id(macaddress)
|
||||
|
||||
|
@@ -22,7 +22,9 @@
|
||||
}
|
||||
},
|
||||
"abort" : {
|
||||
"already_configured" : "This DoorBird is already configured"
|
||||
"already_configured" : "This DoorBird is already configured",
|
||||
"link_local_address": "Link local addresses are not supported",
|
||||
"not_doorbird_device": "This device is not a DoorBird"
|
||||
},
|
||||
"title" : "DoorBird",
|
||||
"error" : {
|
||||
|
@@ -131,6 +131,24 @@ async def async_devices_query(hass, data, payload):
|
||||
return {"devices": devices}
|
||||
|
||||
|
||||
async def _entity_execute(entity, data, executions):
|
||||
"""Execute all commands for an entity.
|
||||
|
||||
Returns a dict if a special result needs to be set.
|
||||
"""
|
||||
for execution in executions:
|
||||
try:
|
||||
await entity.execute(data, execution)
|
||||
except SmartHomeError as err:
|
||||
return {
|
||||
"ids": [entity.entity_id],
|
||||
"status": "ERROR",
|
||||
**err.to_response(),
|
||||
}
|
||||
|
||||
return None
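A stripped-down sketch of the concurrency pattern used by the refactored handler below: commands for one entity stay sequential, while different entities run in parallel via asyncio.gather (entity ids and command names are invented, and asyncio.sleep stands in for entity.execute):

import asyncio

async def run_entity(entity_id, commands):
    # One entity's commands run in order; an error would short-circuit only this entity.
    for command in commands:
        await asyncio.sleep(0)  # stand-in for: await entity.execute(data, execution)
    return None

async def run_all(executions):
    # Independent entities are executed concurrently.
    results = await asyncio.gather(
        *[run_entity(entity_id, cmds) for entity_id, cmds in executions.items()]
    )
    return dict(zip(executions, results))

asyncio.run(run_all({"light.kitchen": ["OnOff"], "light.hall": ["OnOff", "Brightness"]}))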
|
||||
|
||||
|
||||
@HANDLERS.register("action.devices.EXECUTE")
|
||||
async def handle_devices_execute(hass, data, payload):
|
||||
"""Handle action.devices.EXECUTE request.
|
||||
@@ -138,6 +156,7 @@ async def handle_devices_execute(hass, data, payload):
|
||||
https://developers.google.com/assistant/smarthome/develop/process-intents#EXECUTE
|
||||
"""
|
||||
entities = {}
|
||||
executions = {}
|
||||
results = {}
|
||||
|
||||
for command in payload["commands"]:
|
||||
@@ -159,27 +178,33 @@ async def handle_devices_execute(hass, data, payload):
|
||||
if entity_id in results:
|
||||
continue
|
||||
|
||||
if entity_id not in entities:
|
||||
state = hass.states.get(entity_id)
|
||||
if entity_id in entities:
|
||||
executions[entity_id].append(execution)
|
||||
continue
|
||||
|
||||
if state is None:
|
||||
results[entity_id] = {
|
||||
"ids": [entity_id],
|
||||
"status": "ERROR",
|
||||
"errorCode": ERR_DEVICE_OFFLINE,
|
||||
}
|
||||
continue
|
||||
state = hass.states.get(entity_id)
|
||||
|
||||
entities[entity_id] = GoogleEntity(hass, data.config, state)
|
||||
|
||||
try:
|
||||
await entities[entity_id].execute(data, execution)
|
||||
except SmartHomeError as err:
|
||||
if state is None:
|
||||
results[entity_id] = {
|
||||
"ids": [entity_id],
|
||||
"status": "ERROR",
|
||||
**err.to_response(),
|
||||
"errorCode": ERR_DEVICE_OFFLINE,
|
||||
}
|
||||
continue
|
||||
|
||||
entities[entity_id] = GoogleEntity(hass, data.config, state)
|
||||
executions[entity_id] = [execution]
|
||||
|
||||
execute_results = await asyncio.gather(
|
||||
*[
|
||||
_entity_execute(entities[entity_id], data, executions[entity_id])
|
||||
for entity_id in executions
|
||||
]
|
||||
)
|
||||
|
||||
for entity_id, result in zip(executions, execute_results):
|
||||
if result is not None:
|
||||
results[entity_id] = result
|
||||
|
||||
final_results = list(results.values())
|
||||
|
||||
|
@@ -142,6 +142,7 @@ CHAR_SWING_MODE = "SwingMode"
|
||||
CHAR_TARGET_DOOR_STATE = "TargetDoorState"
|
||||
CHAR_TARGET_HEATING_COOLING = "TargetHeatingCoolingState"
|
||||
CHAR_TARGET_POSITION = "TargetPosition"
|
||||
CHAR_TARGET_HUMIDITY = "TargetRelativeHumidity"
|
||||
CHAR_TARGET_SECURITY_STATE = "SecuritySystemTargetState"
|
||||
CHAR_TARGET_TEMPERATURE = "TargetTemperature"
|
||||
CHAR_TARGET_TILT_ANGLE = "TargetHorizontalTiltAngle"
|
||||
|
@@ -4,11 +4,14 @@ import logging
|
||||
from pyhap.const import CATEGORY_THERMOSTAT
|
||||
|
||||
from homeassistant.components.climate.const import (
|
||||
ATTR_CURRENT_HUMIDITY,
|
||||
ATTR_CURRENT_TEMPERATURE,
|
||||
ATTR_HUMIDITY,
|
||||
ATTR_HVAC_ACTION,
|
||||
ATTR_HVAC_MODE,
|
||||
ATTR_HVAC_MODES,
|
||||
ATTR_MAX_TEMP,
|
||||
ATTR_MIN_HUMIDITY,
|
||||
ATTR_MIN_TEMP,
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
@@ -17,6 +20,7 @@ from homeassistant.components.climate.const import (
|
||||
CURRENT_HVAC_IDLE,
|
||||
CURRENT_HVAC_OFF,
|
||||
DEFAULT_MAX_TEMP,
|
||||
DEFAULT_MIN_HUMIDITY,
|
||||
DEFAULT_MIN_TEMP,
|
||||
DOMAIN as DOMAIN_CLIMATE,
|
||||
HVAC_MODE_AUTO,
|
||||
@@ -25,8 +29,10 @@ from homeassistant.components.climate.const import (
|
||||
HVAC_MODE_HEAT,
|
||||
HVAC_MODE_HEAT_COOL,
|
||||
HVAC_MODE_OFF,
|
||||
SERVICE_SET_HUMIDITY,
|
||||
SERVICE_SET_HVAC_MODE as SERVICE_SET_HVAC_MODE_THERMOSTAT,
|
||||
SERVICE_SET_TEMPERATURE as SERVICE_SET_TEMPERATURE_THERMOSTAT,
|
||||
SUPPORT_TARGET_HUMIDITY,
|
||||
SUPPORT_TARGET_TEMPERATURE_RANGE,
|
||||
)
|
||||
from homeassistant.components.water_heater import (
|
||||
@@ -39,6 +45,7 @@ from homeassistant.const import (
|
||||
ATTR_TEMPERATURE,
|
||||
TEMP_CELSIUS,
|
||||
TEMP_FAHRENHEIT,
|
||||
UNIT_PERCENTAGE,
|
||||
)
|
||||
|
||||
from . import TYPES
|
||||
@@ -46,9 +53,11 @@ from .accessories import HomeAccessory, debounce
|
||||
from .const import (
|
||||
CHAR_COOLING_THRESHOLD_TEMPERATURE,
|
||||
CHAR_CURRENT_HEATING_COOLING,
|
||||
CHAR_CURRENT_HUMIDITY,
|
||||
CHAR_CURRENT_TEMPERATURE,
|
||||
CHAR_HEATING_THRESHOLD_TEMPERATURE,
|
||||
CHAR_TARGET_HEATING_COOLING,
|
||||
CHAR_TARGET_HUMIDITY,
|
||||
CHAR_TARGET_TEMPERATURE,
|
||||
CHAR_TEMP_DISPLAY_UNITS,
|
||||
DEFAULT_MAX_TEMP_WATER_HEATER,
|
||||
@@ -99,6 +108,10 @@ class Thermostat(HomeAccessory):
|
||||
self._flag_heatingthresh = False
|
||||
min_temp, max_temp = self.get_temperature_range()
|
||||
|
||||
min_humidity = self.hass.states.get(self.entity_id).attributes.get(
|
||||
ATTR_MIN_HUMIDITY, DEFAULT_MIN_HUMIDITY
|
||||
)
|
||||
|
||||
# Add additional characteristics if auto mode is supported
|
||||
self.chars = []
|
||||
state = self.hass.states.get(self.entity_id)
|
||||
@@ -109,6 +122,9 @@ class Thermostat(HomeAccessory):
|
||||
(CHAR_COOLING_THRESHOLD_TEMPERATURE, CHAR_HEATING_THRESHOLD_TEMPERATURE)
|
||||
)
|
||||
|
||||
if features & SUPPORT_TARGET_HUMIDITY:
|
||||
self.chars.extend((CHAR_TARGET_HUMIDITY, CHAR_CURRENT_HUMIDITY))
|
||||
|
||||
serv_thermostat = self.add_preload_service(SERV_THERMOSTAT, self.chars)
|
||||
|
||||
# Current mode characteristics
|
||||
@@ -193,6 +209,23 @@ class Thermostat(HomeAccessory):
|
||||
properties={PROP_MIN_VALUE: min_temp, PROP_MAX_VALUE: max_temp},
|
||||
setter_callback=self.set_heating_threshold,
|
||||
)
|
||||
self.char_target_humidity = None
|
||||
self.char_current_humidity = None
|
||||
if CHAR_TARGET_HUMIDITY in self.chars:
|
||||
self.char_target_humidity = serv_thermostat.configure_char(
|
||||
CHAR_TARGET_HUMIDITY,
|
||||
value=50,
|
||||
# We do not set a max humidity because
|
||||
# homekit currently has a bug that will show the lower bound
|
||||
# shifted upwards. For example if you have a max humidity
|
||||
# of 80% homekit will give you the options 20%-100% instead
|
||||
# of 0-80%
|
||||
properties={PROP_MIN_VALUE: min_humidity},
|
||||
setter_callback=self.set_target_humidity,
|
||||
)
|
||||
self.char_current_humidity = serv_thermostat.configure_char(
|
||||
CHAR_CURRENT_HUMIDITY, value=50,
|
||||
)
|
||||
|
||||
def get_temperature_range(self):
|
||||
"""Return min and max temperature range."""
|
||||
@@ -224,6 +257,15 @@ class Thermostat(HomeAccessory):
|
||||
DOMAIN_CLIMATE, SERVICE_SET_HVAC_MODE_THERMOSTAT, params, hass_value
|
||||
)
|
||||
|
||||
@debounce
|
||||
def set_target_humidity(self, value):
|
||||
"""Set target humidity to value if call came from HomeKit."""
|
||||
_LOGGER.debug("%s: Set target humidity to %d", self.entity_id, value)
|
||||
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_HUMIDITY: value}
|
||||
self.call_service(
|
||||
DOMAIN_CLIMATE, SERVICE_SET_HUMIDITY, params, f"{value}{UNIT_PERCENTAGE}",
|
||||
)
|
||||
|
||||
@debounce
|
||||
def set_cooling_threshold(self, value):
|
||||
"""Set cooling threshold temp to value if call came from HomeKit."""
|
||||
@@ -288,6 +330,12 @@ class Thermostat(HomeAccessory):
|
||||
current_temp = temperature_to_homekit(current_temp, self._unit)
|
||||
self.char_current_temp.set_value(current_temp)
|
||||
|
||||
# Update current humidity
|
||||
if CHAR_CURRENT_HUMIDITY in self.chars:
|
||||
current_humidity = new_state.attributes.get(ATTR_CURRENT_HUMIDITY)
if isinstance(current_humidity, (int, float)):
self.char_current_humidity.set_value(current_humidity)
|
||||
|
||||
# Update target temperature
|
||||
target_temp = new_state.attributes.get(ATTR_TEMPERATURE)
|
||||
if isinstance(target_temp, (int, float)):
|
||||
@@ -296,6 +344,12 @@ class Thermostat(HomeAccessory):
|
||||
self.char_target_temp.set_value(target_temp)
|
||||
self._flag_temperature = False
|
||||
|
||||
# Update target humidity
|
||||
if CHAR_TARGET_HUMIDITY in self.chars:
|
||||
target_humidity = new_state.attributes.get(ATTR_HUMIDITY)
if isinstance(target_humidity, (int, float)):
self.char_target_humidity.set_value(target_humidity)
|
||||
|
||||
# Update cooling threshold temperature if characteristic exists
|
||||
if self.char_cooling_thresh_temp:
|
||||
cooling_thresh = new_state.attributes.get(ATTR_TARGET_TEMP_HIGH)
|
||||
|
homeassistant/components/hue/hue_event.py (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
"""Representation of a Hue remote firing events for button presses."""
|
||||
import logging
|
||||
|
||||
from aiohue.sensors import TYPE_ZGP_SWITCH, TYPE_ZLL_ROTARY, TYPE_ZLL_SWITCH
|
||||
|
||||
from homeassistant.const import CONF_EVENT, CONF_ID
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .sensor_device import GenericHueDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_HUE_EVENT = "hue_event"
|
||||
CONF_LAST_UPDATED = "last_updated"
|
||||
CONF_UNIQUE_ID = "unique_id"
|
||||
|
||||
EVENT_NAME_FORMAT = "{}"
|
||||
|
||||
|
||||
class HueEvent(GenericHueDevice):
|
||||
"""When you want signals instead of entities.
|
||||
|
||||
Stateless sensors such as remotes are expected to generate an event
|
||||
instead of a sensor entity in hass.
|
||||
"""
|
||||
|
||||
def __init__(self, sensor, name, bridge, primary_sensor=None):
|
||||
"""Register callback that will be used for signals."""
|
||||
super().__init__(sensor, name, bridge, primary_sensor)
|
||||
|
||||
self.event_id = slugify(self.sensor.name)
|
||||
# Use the 'lastupdated' string to detect new remote presses
|
||||
self._last_updated = self.sensor.lastupdated
|
||||
|
||||
# Register callback in coordinator and add job to remove it on bridge reset.
|
||||
self.bridge.sensor_manager.coordinator.async_add_listener(
|
||||
self.async_update_callback
|
||||
)
|
||||
self.bridge.reset_jobs.append(self.async_will_remove_from_hass)
|
||||
_LOGGER.debug("Hue event created: %s", self.event_id)
|
||||
|
||||
@callback
|
||||
def async_will_remove_from_hass(self):
|
||||
"""Remove listener on bridge reset."""
|
||||
self.bridge.sensor_manager.coordinator.async_remove_listener(
|
||||
self.async_update_callback
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_update_callback(self):
|
||||
"""Fire the event if reason is that state is updated."""
|
||||
if self.sensor.lastupdated == self._last_updated:
|
||||
return
|
||||
|
||||
# Extract the press code as state
|
||||
if hasattr(self.sensor, "rotaryevent"):
|
||||
state = self.sensor.rotaryevent
|
||||
else:
|
||||
state = self.sensor.buttonevent
|
||||
|
||||
self._last_updated = self.sensor.lastupdated
|
||||
|
||||
# Fire event
|
||||
data = {
|
||||
CONF_ID: self.event_id,
|
||||
CONF_UNIQUE_ID: self.unique_id,
|
||||
CONF_EVENT: state,
|
||||
CONF_LAST_UPDATED: self.sensor.lastupdated,
|
||||
}
|
||||
self.bridge.hass.bus.async_fire(CONF_HUE_EVENT, data)
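A minimal sketch of consuming these events from the bus; the event id "kitchen_switch" and button code 1002 are made-up examples, the integration only guarantees the keys placed in the data dict above:

from homeassistant.core import Event, HomeAssistant, callback

@callback
def handle_hue_event(event: Event) -> None:
    # data carries the slugified id, the sensor unique_id, the button/rotary code, and last_updated.
    if event.data.get("id") == "kitchen_switch" and event.data.get("event") == 1002:
        pass  # react to the press here, e.g. toggle a light

def register(hass: HomeAssistant):
    # Returns an unsubscribe callable.
    return hass.bus.async_listen("hue_event", handle_hue_event)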
|
||||
|
||||
async def async_update_device_registry(self):
|
||||
"""Update device registry."""
|
||||
device_registry = (
|
||||
await self.bridge.hass.helpers.device_registry.async_get_registry()
|
||||
)
|
||||
|
||||
entry = device_registry.async_get_or_create(
|
||||
config_entry_id=self.bridge.config_entry.entry_id, **self.device_info
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Event registry with entry_id: %s and device_id: %s",
|
||||
entry.id,
|
||||
self.device_id,
|
||||
)
|
||||
|
||||
|
||||
EVENT_CONFIG_MAP = {
|
||||
TYPE_ZGP_SWITCH: {"name_format": EVENT_NAME_FORMAT, "class": HueEvent},
|
||||
TYPE_ZLL_SWITCH: {"name_format": EVENT_NAME_FORMAT, "class": HueEvent},
|
||||
TYPE_ZLL_ROTARY: {"name_format": EVENT_NAME_FORMAT, "class": HueEvent},
|
||||
}
|
@@ -1,17 +1,25 @@
|
||||
"""Hue sensor entities."""
|
||||
from aiohue.sensors import TYPE_ZLL_LIGHTLEVEL, TYPE_ZLL_TEMPERATURE
|
||||
from aiohue.sensors import (
|
||||
TYPE_ZLL_LIGHTLEVEL,
|
||||
TYPE_ZLL_ROTARY,
|
||||
TYPE_ZLL_SWITCH,
|
||||
TYPE_ZLL_TEMPERATURE,
|
||||
)
|
||||
|
||||
from homeassistant.const import (
|
||||
DEVICE_CLASS_BATTERY,
|
||||
DEVICE_CLASS_ILLUMINANCE,
|
||||
DEVICE_CLASS_TEMPERATURE,
|
||||
TEMP_CELSIUS,
|
||||
UNIT_PERCENTAGE,
|
||||
)
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import DOMAIN as HUE_DOMAIN
|
||||
from .sensor_base import SENSOR_CONFIG_MAP, GenericZLLSensor
|
||||
from .sensor_base import SENSOR_CONFIG_MAP, GenericHueSensor, GenericZLLSensor
|
||||
|
||||
LIGHT_LEVEL_NAME_FORMAT = "{} light level"
|
||||
REMOTE_NAME_FORMAT = "{} battery level"
|
||||
TEMPERATURE_NAME_FORMAT = "{} temperature"
|
||||
|
||||
|
||||
@@ -79,6 +87,30 @@ class HueTemperature(GenericHueGaugeSensorEntity):
|
||||
return self.sensor.temperature / 100
|
||||
|
||||
|
||||
class HueBattery(GenericHueSensor):
|
||||
"""Battery class for when a batt-powered device is only represented as an event."""
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return a unique identifier for this device."""
|
||||
return f"{self.sensor.uniqueid}-battery"
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the battery."""
|
||||
return self.sensor.battery
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the class of the sensor."""
|
||||
return DEVICE_CLASS_BATTERY
|
||||
|
||||
@property
|
||||
def unit_of_measurement(self):
|
||||
"""Return the unit of measurement of this entity."""
|
||||
return UNIT_PERCENTAGE
|
||||
|
||||
|
||||
SENSOR_CONFIG_MAP.update(
|
||||
{
|
||||
TYPE_ZLL_LIGHTLEVEL: {
|
||||
@@ -91,5 +123,15 @@ SENSOR_CONFIG_MAP.update(
|
||||
"name_format": TEMPERATURE_NAME_FORMAT,
|
||||
"class": HueTemperature,
|
||||
},
|
||||
TYPE_ZLL_SWITCH: {
|
||||
"platform": "sensor",
|
||||
"name_format": REMOTE_NAME_FORMAT,
|
||||
"class": HueBattery,
|
||||
},
|
||||
TYPE_ZLL_ROTARY: {
|
||||
"platform": "sensor",
|
||||
"name_format": REMOTE_NAME_FORMAT,
|
||||
"class": HueBattery,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
@@ -10,8 +10,10 @@ from homeassistant.core import callback
|
||||
from homeassistant.helpers import debounce, entity
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN as HUE_DOMAIN, REQUEST_REFRESH_DELAY
|
||||
from .const import REQUEST_REFRESH_DELAY
|
||||
from .helpers import remove_devices
|
||||
from .hue_event import EVENT_CONFIG_MAP
|
||||
from .sensor_device import GenericHueDevice
|
||||
|
||||
SENSOR_CONFIG_MAP = {}
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -38,6 +40,9 @@ class SensorManager:
|
||||
self.bridge = bridge
|
||||
self._component_add_entities = {}
|
||||
self.current = {}
|
||||
self.current_events = {}
|
||||
|
||||
self._enabled_platforms = ("binary_sensor", "sensor")
|
||||
self.coordinator = DataUpdateCoordinator(
|
||||
bridge.hass,
|
||||
_LOGGER,
|
||||
@@ -66,7 +71,8 @@ class SensorManager:
|
||||
"""Register async_add_entities methods for components."""
|
||||
self._component_add_entities[platform] = async_add_entities
|
||||
|
||||
if len(self._component_add_entities) < 2:
|
||||
if len(self._component_add_entities) < len(self._enabled_platforms):
|
||||
_LOGGER.debug("Aborting start with %s, waiting for the rest", platform)
|
||||
return
|
||||
|
||||
# We have all components available, start the updating.
|
||||
@@ -81,7 +87,7 @@ class SensorManager:
|
||||
"""Update sensors from the bridge."""
|
||||
api = self.bridge.api.sensors
|
||||
|
||||
if len(self._component_add_entities) < 2:
|
||||
if len(self._component_add_entities) < len(self._enabled_platforms):
|
||||
return
|
||||
|
||||
to_add = {}
|
||||
@@ -110,12 +116,24 @@ class SensorManager:
|
||||
# Iterate again now we have all the presence sensors, and add the
|
||||
# related sensors with nice names where appropriate.
|
||||
for item_id in api:
|
||||
existing = current.get(api[item_id].uniqueid)
|
||||
if existing is not None:
|
||||
uniqueid = api[item_id].uniqueid
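# Skip devices we already track, either as sensor entities or as event generators.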
|
||||
if current.get(uniqueid, self.current_events.get(uniqueid)) is not None:
|
||||
continue
|
||||
|
||||
primary_sensor = None
|
||||
sensor_config = SENSOR_CONFIG_MAP.get(api[item_id].type)
|
||||
sensor_type = api[item_id].type
|
||||
|
||||
# Check for event generator devices
|
||||
event_config = EVENT_CONFIG_MAP.get(sensor_type)
|
||||
if event_config is not None:
|
||||
base_name = api[item_id].name
|
||||
name = event_config["name_format"].format(base_name)
|
||||
new_event = event_config["class"](api[item_id], name, self.bridge)
|
||||
self.bridge.hass.async_create_task(
|
||||
new_event.async_update_device_registry()
|
||||
)
|
||||
self.current_events[uniqueid] = new_event
|
||||
|
||||
sensor_config = SENSOR_CONFIG_MAP.get(sensor_type)
|
||||
if sensor_config is None:
|
||||
continue
|
||||
|
||||
@@ -125,13 +143,11 @@ class SensorManager:
|
||||
base_name = primary_sensor.name
|
||||
name = sensor_config["name_format"].format(base_name)
|
||||
|
||||
current[api[item_id].uniqueid] = sensor_config["class"](
|
||||
current[uniqueid] = sensor_config["class"](
|
||||
api[item_id], name, self.bridge, primary_sensor=primary_sensor
|
||||
)
|
||||
|
||||
to_add.setdefault(sensor_config["platform"], []).append(
|
||||
current[api[item_id].uniqueid]
|
||||
)
|
||||
to_add.setdefault(sensor_config["platform"], []).append(current[uniqueid])
|
||||
|
||||
self.bridge.hass.async_create_task(
|
||||
remove_devices(
|
||||
@@ -143,53 +159,23 @@ class SensorManager:
|
||||
self._component_add_entities[platform](to_add[platform])
|
||||
|
||||
|
||||
class GenericHueSensor(entity.Entity):
|
||||
class GenericHueSensor(GenericHueDevice, entity.Entity):
|
||||
"""Representation of a Hue sensor."""
|
||||
|
||||
should_poll = False
|
||||
|
||||
def __init__(self, sensor, name, bridge, primary_sensor=None):
|
||||
"""Initialize the sensor."""
|
||||
self.sensor = sensor
|
||||
self._name = name
|
||||
self._primary_sensor = primary_sensor
|
||||
self.bridge = bridge
|
||||
|
||||
async def _async_update_ha_state(self, *args, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def primary_sensor(self):
|
||||
"""Return the primary sensor entity of the physical device."""
|
||||
return self._primary_sensor or self.sensor
|
||||
|
||||
@property
|
||||
def device_id(self):
|
||||
"""Return the ID of the physical device this sensor is part of."""
|
||||
return self.unique_id[:23]
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return the ID of this Hue sensor."""
|
||||
return self.sensor.uniqueid
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return a friendly name for the sensor."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def available(self):
|
||||
"""Return if sensor is available."""
|
||||
return self.bridge.sensor_manager.coordinator.last_update_success and (
|
||||
self.bridge.allow_unreachable or self.sensor.config["reachable"]
|
||||
self.bridge.allow_unreachable
|
||||
# remotes like Hue Tap (ZGPSwitchSensor) have no _reachability_
|
||||
or self.sensor.config.get("reachable", True)
|
||||
)
|
||||
|
||||
@property
|
||||
def swupdatestate(self):
|
||||
"""Return detail of available software updates for this device."""
|
||||
return self.primary_sensor.raw.get("swupdate", {}).get("state")
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""When entity is added to hass."""
|
||||
self.bridge.sensor_manager.coordinator.async_add_listener(
|
||||
@@ -209,21 +195,6 @@ class GenericHueSensor(entity.Entity):
|
||||
"""
|
||||
await self.bridge.sensor_manager.coordinator.async_request_refresh()
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
"""Return the device info.
|
||||
|
||||
Links individual entities together in the hass device registry.
|
||||
"""
|
||||
return {
|
||||
"identifiers": {(HUE_DOMAIN, self.device_id)},
|
||||
"name": self.primary_sensor.name,
|
||||
"manufacturer": self.primary_sensor.manufacturername,
|
||||
"model": (self.primary_sensor.productname or self.primary_sensor.modelid),
|
||||
"sw_version": self.primary_sensor.swversion,
|
||||
"via_device": (HUE_DOMAIN, self.bridge.api.config.bridgeid),
|
||||
}
|
||||
|
||||
|
||||
class GenericZLLSensor(GenericHueSensor):
|
||||
"""Representation of a Hue-brand, physical sensor."""
|
||||
|
53
homeassistant/components/hue/sensor_device.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""Support for the Philips Hue sensor devices."""
|
||||
from .const import DOMAIN as HUE_DOMAIN
|
||||
|
||||
|
||||
class GenericHueDevice:
|
||||
"""Representation of a Hue device."""
|
||||
|
||||
def __init__(self, sensor, name, bridge, primary_sensor=None):
|
||||
"""Initialize the sensor."""
|
||||
self.sensor = sensor
|
||||
self._name = name
|
||||
self._primary_sensor = primary_sensor
|
||||
self.bridge = bridge
|
||||
|
||||
@property
|
||||
def primary_sensor(self):
|
||||
"""Return the primary sensor entity of the physical device."""
|
||||
return self._primary_sensor or self.sensor
|
||||
|
||||
@property
|
||||
def device_id(self):
|
||||
"""Return the ID of the physical device this sensor is part of."""
|
||||
return self.unique_id[:23]
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return the ID of this Hue sensor."""
|
||||
return self.sensor.uniqueid
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return a friendly name for the sensor."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def swupdatestate(self):
|
||||
"""Return detail of available software updates for this device."""
|
||||
return self.primary_sensor.raw.get("swupdate", {}).get("state")
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
"""Return the device info.
|
||||
|
||||
Links individual entities together in the hass device registry.
|
||||
"""
|
||||
return {
|
||||
"identifiers": {(HUE_DOMAIN, self.device_id)},
|
||||
"name": self.primary_sensor.name,
|
||||
"manufacturer": self.primary_sensor.manufacturername,
|
||||
"model": (self.primary_sensor.productname or self.primary_sensor.modelid),
|
||||
"sw_version": self.primary_sensor.swversion,
|
||||
"via_device": (HUE_DOMAIN, self.bridge.api.config.bridgeid),
|
||||
}
|
@@ -205,7 +205,7 @@ class KNXModule:
|
||||
|
||||
def connection_config_tunneling(self):
|
||||
"""Return the connection_config if tunneling is configured."""
|
||||
gateway_ip = self.config[DOMAIN][CONF_KNX_TUNNELING].get(CONF_HOST)
|
||||
gateway_ip = self.config[DOMAIN][CONF_KNX_TUNNELING][CONF_HOST]
|
||||
gateway_port = self.config[DOMAIN][CONF_KNX_TUNNELING].get(CONF_PORT)
|
||||
local_ip = self.config[DOMAIN][CONF_KNX_TUNNELING].get(CONF_KNX_LOCAL_IP)
|
||||
if gateway_port is None:
|
||||
@@ -215,6 +215,7 @@ class KNXModule:
|
||||
gateway_ip=gateway_ip,
|
||||
gateway_port=gateway_port,
|
||||
local_ip=local_ip,
|
||||
auto_reconnect=True,
|
||||
)
|
||||
|
||||
def connection_config_auto(self):
|
||||
|
@@ -33,6 +33,9 @@
|
||||
"abort": {
|
||||
"not_konn_panel": "Not a recognized Konnected.io device"
|
||||
},
|
||||
"error": {
|
||||
"bad_host": "Invalid Override API host url"
|
||||
},
|
||||
"step": {
|
||||
"options_binary": {
|
||||
"data": {
|
||||
@@ -82,7 +85,9 @@
|
||||
},
|
||||
"options_misc": {
|
||||
"data": {
|
||||
"blink": "Blink panel LED on when sending state change"
|
||||
"api_host": "Override API host URL (optional)",
|
||||
"blink": "Blink panel LED on when sending state change",
|
||||
"override_api_host": "Override default Home Assistant API host panel URL"
|
||||
},
|
||||
"description": "Please select the desired behavior for your panel",
|
||||
"title": "Configure Misc"
|
||||
|
@@ -91,7 +91,7 @@ def ensure_zone(value):
|
||||
return str(value)
|
||||
|
||||
|
||||
def import_validator(config):
|
||||
def import_device_validator(config):
|
||||
"""Validate zones and reformat for import."""
|
||||
config = copy.deepcopy(config)
|
||||
io_cfgs = {}
|
||||
@@ -117,10 +117,22 @@ def import_validator(config):
|
||||
config.pop(CONF_SWITCHES, None)
|
||||
config.pop(CONF_BLINK, None)
|
||||
config.pop(CONF_DISCOVERY, None)
|
||||
config.pop(CONF_API_HOST, None)
|
||||
config.pop(CONF_IO, None)
|
||||
return config
|
||||
|
||||
|
||||
def import_validator(config):
|
||||
"""Reformat for import."""
|
||||
config = copy.deepcopy(config)
|
||||
|
||||
# push api_host into device configs
|
||||
for device in config.get(CONF_DEVICES, []):
|
||||
device[CONF_API_HOST] = config.get(CONF_API_HOST, "")
|
||||
|
||||
return config
|
||||
|
||||
|
||||
# configuration.yaml schemas (legacy)
|
||||
BINARY_SENSOR_SCHEMA_YAML = vol.All(
|
||||
vol.Schema(
|
||||
@@ -179,23 +191,27 @@ DEVICE_SCHEMA_YAML = vol.All(
|
||||
vol.Inclusive(CONF_HOST, "host_info"): cv.string,
|
||||
vol.Inclusive(CONF_PORT, "host_info"): cv.port,
|
||||
vol.Optional(CONF_BLINK, default=True): cv.boolean,
|
||||
vol.Optional(CONF_API_HOST, default=""): vol.Any("", cv.url),
|
||||
vol.Optional(CONF_DISCOVERY, default=True): cv.boolean,
|
||||
}
|
||||
),
|
||||
import_validator,
|
||||
import_device_validator,
|
||||
)
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ACCESS_TOKEN): cv.string,
|
||||
vol.Optional(CONF_API_HOST): vol.Url(),
|
||||
vol.Optional(CONF_DEVICES): vol.All(
|
||||
cv.ensure_list, [DEVICE_SCHEMA_YAML]
|
||||
),
|
||||
}
|
||||
DOMAIN: vol.All(
|
||||
import_validator,
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ACCESS_TOKEN): cv.string,
|
||||
vol.Optional(CONF_API_HOST): vol.Url(),
|
||||
vol.Optional(CONF_DEVICES): vol.All(
|
||||
cv.ensure_list, [DEVICE_SCHEMA_YAML]
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
|
@@ -31,6 +31,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import (
|
||||
CONF_ACTIVATION,
|
||||
CONF_API_HOST,
|
||||
CONF_BLINK,
|
||||
CONF_DEFAULT_OPTIONS,
|
||||
CONF_DISCOVERY,
|
||||
@@ -61,6 +62,8 @@ CONF_MORE_STATES = "more_states"
|
||||
CONF_YES = "Yes"
|
||||
CONF_NO = "No"
|
||||
|
||||
CONF_OVERRIDE_API_HOST = "override_api_host"
|
||||
|
||||
KONN_MANUFACTURER = "konnected.io"
|
||||
KONN_PANEL_MODEL_NAMES = {
|
||||
KONN_MODEL: "Konnected Alarm Panel",
|
||||
@@ -138,6 +141,7 @@ OPTIONS_SCHEMA = vol.Schema(
|
||||
vol.Optional(CONF_SENSORS): vol.All(cv.ensure_list, [SENSOR_SCHEMA]),
|
||||
vol.Optional(CONF_SWITCHES): vol.All(cv.ensure_list, [SWITCH_SCHEMA]),
|
||||
vol.Optional(CONF_BLINK, default=True): cv.boolean,
|
||||
vol.Optional(CONF_API_HOST, default=""): vol.Any("", cv.url),
|
||||
vol.Optional(CONF_DISCOVERY, default=True): cv.boolean,
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
@@ -785,8 +789,19 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
|
||||
"""Allow the user to configure the LED behavior."""
|
||||
errors = {}
|
||||
if user_input is not None:
|
||||
self.new_opt[CONF_BLINK] = user_input[CONF_BLINK]
|
||||
return self.async_create_entry(title="", data=self.new_opt)
|
||||
# the config schema only does basic validation, so check the URL here
|
||||
try:
|
||||
if user_input[CONF_OVERRIDE_API_HOST]:
|
||||
cv.url(user_input.get(CONF_API_HOST, ""))
|
||||
else:
|
||||
user_input[CONF_API_HOST] = ""
|
||||
except vol.Invalid:
|
||||
errors["base"] = "bad_host"
|
||||
else:
|
||||
# no need to store the override flag - it can be inferred from the API host value
|
||||
del user_input[CONF_OVERRIDE_API_HOST]
|
||||
self.new_opt.update(user_input)
|
||||
return self.async_create_entry(title="", data=self.new_opt)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="options_misc",
|
||||
@@ -795,6 +810,13 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
|
||||
vol.Required(
|
||||
CONF_BLINK, default=self.current_opt.get(CONF_BLINK, True)
|
||||
): bool,
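# Pre-select the override toggle when an API host override is already stored.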
|
||||
vol.Required(
|
||||
CONF_OVERRIDE_API_HOST,
|
||||
default=bool(self.current_opt.get(CONF_API_HOST)),
|
||||
): bool,
|
||||
vol.Optional(
|
||||
CONF_API_HOST, default=self.current_opt.get(CONF_API_HOST, "")
|
||||
): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
|
@@ -294,7 +294,9 @@ class AlarmPanel:
|
||||
@callback
|
||||
def async_desired_settings_payload(self):
|
||||
"""Return a dict representing the desired device configuration."""
|
||||
desired_api_host = (
|
||||
# keeping self.hass.data check for backwards compatibility
|
||||
# newly configured integrations store this in the config entry
|
||||
desired_api_host = self.options.get(CONF_API_HOST) or (
|
||||
self.hass.data[DOMAIN].get(CONF_API_HOST) or self.hass.config.api.base_url
|
||||
)
|
||||
desired_api_endpoint = desired_api_host + ENDPOINT_ROOT
|
||||
|
@@ -94,11 +94,15 @@
|
||||
"title": "Configure Misc",
|
||||
"description": "Please select the desired behavior for your panel",
|
||||
"data": {
|
||||
"blink": "Blink panel LED on when sending state change"
|
||||
"blink": "Blink panel LED on when sending state change",
|
||||
"override_api_host": "Override default Home Assistant API host panel URL",
|
||||
"api_host": "Override API host URL (optional)"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {},
|
||||
"error": {
|
||||
"bad_host": "Invalid Override API host url"
|
||||
},
|
||||
"abort": {
|
||||
"not_konn_panel": "Not a recognized Konnected.io device"
|
||||
}
|
||||
|
108
homeassistant/components/myq/binary_sensor.py
Normal file
@@ -0,0 +1,108 @@
|
||||
"""Support for MyQ gateways."""
|
||||
import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
DEVICE_CLASS_CONNECTIVITY,
|
||||
BinarySensorDevice,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
KNOWN_MODELS,
|
||||
MANUFACTURER,
|
||||
MYQ_COORDINATOR,
|
||||
MYQ_DEVICE_FAMILY,
|
||||
MYQ_DEVICE_FAMILY_GATEWAY,
|
||||
MYQ_DEVICE_STATE,
|
||||
MYQ_DEVICE_STATE_ONLINE,
|
||||
MYQ_GATEWAY,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
"""Set up mysq covers."""
|
||||
data = hass.data[DOMAIN][config_entry.entry_id]
|
||||
myq = data[MYQ_GATEWAY]
|
||||
coordinator = data[MYQ_COORDINATOR]
|
||||
|
||||
entities = []
|
||||
|
||||
for device in myq.devices.values():
|
||||
if device.device_json[MYQ_DEVICE_FAMILY] == MYQ_DEVICE_FAMILY_GATEWAY:
|
||||
entities.append(MyQBinarySensorDevice(coordinator, device))
|
||||
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
class MyQBinarySensorDevice(BinarySensorDevice):
|
||||
"""Representation of a MyQ gateway."""
|
||||
|
||||
def __init__(self, coordinator, device):
|
||||
"""Initialize with API object, device id."""
|
||||
self._coordinator = coordinator
|
||||
self._device = device
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""We track connectivity for gateways."""
|
||||
return DEVICE_CLASS_CONNECTIVITY
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the garage door if any."""
|
||||
return f"{self._device.name} MyQ Gateway"
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return if the device is online."""
|
||||
if not self._coordinator.last_update_success:
|
||||
return False
|
||||
|
||||
# Not all devices report online, so assume True if it's missing
|
||||
return self._device.device_json[MYQ_DEVICE_STATE].get(
|
||||
MYQ_DEVICE_STATE_ONLINE, True
|
||||
)
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return a unique, Home Assistant friendly identifier for this entity."""
|
||||
return self._device.device_id
|
||||
|
||||
async def async_update(self):
|
||||
"""Update status of cover."""
|
||||
await self._coordinator.async_request_refresh()
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
"""Return the device_info of the device."""
|
||||
device_info = {
|
||||
"identifiers": {(DOMAIN, self._device.device_id)},
|
||||
"name": self.name,
|
||||
"manufacturer": MANUFACTURER,
|
||||
"sw_version": self._device.firmware_version,
|
||||
}
|
||||
model = KNOWN_MODELS.get(self._device.device_id[2:4])
|
||||
if model:
|
||||
device_info["model"] = model
|
||||
|
||||
return device_info
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""Return False, updates are controlled via coordinator."""
|
||||
return False
|
||||
|
||||
@callback
|
||||
def _async_consume_update(self):
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Subscribe to updates."""
|
||||
self._coordinator.async_add_listener(self._async_consume_update)
|
||||
|
||||
async def async_will_remove_from_hass(self):
|
||||
"""Undo subscription."""
|
||||
self._coordinator.async_remove_listener(self._async_consume_update)
|
@@ -10,10 +10,14 @@ from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_O
|
||||
|
||||
DOMAIN = "myq"
|
||||
|
||||
PLATFORMS = ["cover"]
|
||||
PLATFORMS = ["cover", "binary_sensor"]
|
||||
|
||||
MYQ_DEVICE_TYPE = "device_type"
|
||||
MYQ_DEVICE_TYPE_GATE = "gate"
|
||||
|
||||
MYQ_DEVICE_FAMILY = "device_family"
|
||||
MYQ_DEVICE_FAMILY_GATEWAY = "gateway"
|
||||
|
||||
MYQ_DEVICE_STATE = "state"
|
||||
MYQ_DEVICE_STATE_ONLINE = "online"
|
||||
|
||||
@@ -39,3 +43,36 @@ TRANSITION_START_DURATION = 7
|
||||
# Estimated time it takes myq to complete a transition
|
||||
# from one state to another
|
||||
TRANSITION_COMPLETE_DURATION = 37
|
||||
|
||||
MANUFACTURER = "The Chamberlain Group Inc."
|
||||
|
||||
KNOWN_MODELS = {
|
||||
"00": "Chamberlain Ethernet Gateway",
|
||||
"01": "LiftMaster Ethernet Gateway",
|
||||
"02": "Craftsman Ethernet Gateway",
|
||||
"03": "Chamberlain Wi-Fi hub",
|
||||
"04": "LiftMaster Wi-Fi hub",
|
||||
"05": "Craftsman Wi-Fi hub",
|
||||
"08": "LiftMaster Wi-Fi GDO DC w/Battery Backup",
|
||||
"09": "Chamberlain Wi-Fi GDO DC w/Battery Backup",
|
||||
"10": "Craftsman Wi-Fi GDO DC 3/4HP",
|
||||
"11": "MyQ Replacement Logic Board Wi-Fi GDO DC 3/4HP",
|
||||
"12": "Chamberlain Wi-Fi GDO DC 1.25HP",
|
||||
"13": "LiftMaster Wi-Fi GDO DC 1.25HP",
|
||||
"14": "Craftsman Wi-Fi GDO DC 1.25HP",
|
||||
"15": "MyQ Replacement Logic Board Wi-Fi GDO DC 1.25HP",
|
||||
"0A": "Chamberlain Wi-Fi GDO or Gate Operator AC",
|
||||
"0B": "LiftMaster Wi-Fi GDO or Gate Operator AC",
|
||||
"0C": "Craftsman Wi-Fi GDO or Gate Operator AC",
|
||||
"0D": "MyQ Replacement Logic Board Wi-Fi GDO or Gate Operator AC",
|
||||
"0E": "Chamberlain Wi-Fi GDO DC 3/4HP",
|
||||
"0F": "LiftMaster Wi-Fi GDO DC 3/4HP",
|
||||
"20": "Chamberlain MyQ Home Bridge",
|
||||
"21": "LiftMaster MyQ Home Bridge",
|
||||
"23": "Chamberlain Smart Garage Hub",
|
||||
"24": "LiftMaster Smart Garage Hub",
|
||||
"27": "LiftMaster Wi-Fi Wall Mount opener",
|
||||
"28": "LiftMaster Commercial Wi-Fi Wall Mount operator",
|
||||
"80": "EU LiftMaster Ethernet Gateway",
|
||||
"81": "EU Chamberlain Ethernet Gateway",
|
||||
}
|
||||
|
@@ -27,6 +27,8 @@ from homeassistant.helpers.event import async_call_later
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
KNOWN_MODELS,
|
||||
MANUFACTURER,
|
||||
MYQ_COORDINATOR,
|
||||
MYQ_DEVICE_STATE,
|
||||
MYQ_DEVICE_STATE_ONLINE,
|
||||
@@ -181,9 +183,12 @@ class MyQDevice(CoverDevice):
|
||||
device_info = {
|
||||
"identifiers": {(DOMAIN, self._device.device_id)},
|
||||
"name": self._device.name,
|
||||
"manufacturer": "The Chamberlain Group Inc.",
|
||||
"manufacturer": MANUFACTURER,
|
||||
"sw_version": self._device.firmware_version,
|
||||
}
|
||||
model = KNOWN_MODELS.get(self._device.device_id[2:4])
|
||||
if model:
|
||||
device_info["model"] = model
|
||||
if self._device.parent_device_id:
|
||||
device_info["via_device"] = (DOMAIN, self._device.parent_device_id)
|
||||
return device_info
|
||||
|
@@ -3,10 +3,16 @@ from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from nuheat.config import SCHEDULE_HOLD, SCHEDULE_RUN, SCHEDULE_TEMPORARY_HOLD
|
||||
from nuheat.util import celsius_to_nuheat, fahrenheit_to_nuheat
|
||||
from nuheat.util import (
|
||||
celsius_to_nuheat,
|
||||
fahrenheit_to_nuheat,
|
||||
nuheat_to_celsius,
|
||||
nuheat_to_fahrenheit,
|
||||
)
|
||||
|
||||
from homeassistant.components.climate import ClimateDevice
|
||||
from homeassistant.components.climate.const import (
|
||||
ATTR_HVAC_MODE,
|
||||
CURRENT_HVAC_HEAT,
|
||||
CURRENT_HVAC_IDLE,
|
||||
HVAC_MODE_AUTO,
|
||||
@@ -15,9 +21,10 @@ from homeassistant.components.climate.const import (
|
||||
SUPPORT_TARGET_TEMPERATURE,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
|
||||
from homeassistant.helpers import event as event_helper
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .const import DOMAIN, MANUFACTURER, NUHEAT_API_STATE_SHIFT_DELAY
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -67,6 +74,8 @@ class NuHeatThermostat(ClimateDevice):
|
||||
"""Initialize the thermostat."""
|
||||
self._thermostat = thermostat
|
||||
self._temperature_unit = temperature_unit
|
||||
self._schedule_mode = None
|
||||
self._target_temperature = None
|
||||
self._force_update = False
|
||||
|
||||
@property
|
||||
@@ -107,19 +116,15 @@ class NuHeatThermostat(ClimateDevice):
|
||||
|
||||
def set_hvac_mode(self, hvac_mode):
|
||||
"""Set the system mode."""
|
||||
|
||||
# This is the same as what res
|
||||
if hvac_mode == HVAC_MODE_AUTO:
|
||||
self._thermostat.resume_schedule()
|
||||
self._set_schedule_mode(SCHEDULE_RUN)
|
||||
elif hvac_mode == HVAC_MODE_HEAT:
|
||||
self._thermostat.schedule_mode = SCHEDULE_HOLD
|
||||
|
||||
self._schedule_update()
|
||||
self._set_schedule_mode(SCHEDULE_HOLD)
|
||||
|
||||
@property
|
||||
def hvac_mode(self):
|
||||
"""Return current setting heat or auto."""
|
||||
if self._thermostat.schedule_mode in (SCHEDULE_TEMPORARY_HOLD, SCHEDULE_HOLD):
|
||||
if self._schedule_mode in (SCHEDULE_TEMPORARY_HOLD, SCHEDULE_HOLD):
|
||||
return HVAC_MODE_HEAT
|
||||
return HVAC_MODE_AUTO
|
||||
|
||||
@@ -148,15 +153,14 @@ class NuHeatThermostat(ClimateDevice):
|
||||
def target_temperature(self):
|
||||
"""Return the currently programmed temperature."""
|
||||
if self._temperature_unit == "C":
|
||||
return self._thermostat.target_celsius
|
||||
return nuheat_to_celsius(self._target_temperature)
|
||||
|
||||
return self._thermostat.target_fahrenheit
|
||||
return nuheat_to_fahrenheit(self._target_temperature)
|
||||
|
||||
@property
|
||||
def preset_mode(self):
|
||||
"""Return current preset mode."""
|
||||
schedule_mode = self._thermostat.schedule_mode
|
||||
return SCHEDULE_MODE_TO_PRESET_MODE_MAP.get(schedule_mode, PRESET_RUN)
|
||||
return SCHEDULE_MODE_TO_PRESET_MODE_MAP.get(self._schedule_mode, PRESET_RUN)
|
||||
|
||||
@property
|
||||
def preset_modes(self):
|
||||
@@ -168,35 +172,44 @@ class NuHeatThermostat(ClimateDevice):
|
||||
"""Return list of possible operation modes."""
|
||||
return OPERATION_LIST
|
||||
|
||||
def resume_program(self):
|
||||
"""Resume the thermostat's programmed schedule."""
|
||||
self._thermostat.resume_schedule()
|
||||
self._schedule_update()
|
||||
|
||||
def set_preset_mode(self, preset_mode):
|
||||
"""Update the hold mode of the thermostat."""
|
||||
|
||||
self._thermostat.schedule_mode = PRESET_MODE_TO_SCHEDULE_MODE_MAP.get(
|
||||
preset_mode, SCHEDULE_RUN
|
||||
self._set_schedule_mode(
|
||||
PRESET_MODE_TO_SCHEDULE_MODE_MAP.get(preset_mode, SCHEDULE_RUN)
|
||||
)
|
||||
|
||||
def _set_schedule_mode(self, schedule_mode):
|
||||
"""Set a schedule mode."""
|
||||
self._schedule_mode = schedule_mode
|
||||
# Changing the property here does the actual set
|
||||
self._thermostat.schedule_mode = schedule_mode
|
||||
self._schedule_update()
|
||||
|
||||
def set_temperature(self, **kwargs):
|
||||
"""Set a new target temperature."""
|
||||
self._set_temperature(kwargs.get(ATTR_TEMPERATURE))
|
||||
self._set_temperature_and_mode(
|
||||
kwargs.get(ATTR_TEMPERATURE), hvac_mode=kwargs.get(ATTR_HVAC_MODE)
|
||||
)
|
||||
|
||||
def _set_temperature(self, temperature):
|
||||
def _set_temperature_and_mode(self, temperature, hvac_mode=None, preset_mode=None):
|
||||
"""Set temperature and hvac mode at the same time."""
|
||||
if self._temperature_unit == "C":
|
||||
target_temp = celsius_to_nuheat(temperature)
|
||||
target_temperature = celsius_to_nuheat(temperature)
|
||||
else:
|
||||
target_temp = fahrenheit_to_nuheat(temperature)
|
||||
target_temperature = fahrenheit_to_nuheat(temperature)
|
||||
|
||||
# If they set a temperature without changing the mode
|
||||
# to heat, we behave like the device does locally
|
||||
# and set a temp hold.
|
||||
target_schedule_mode = SCHEDULE_HOLD
|
||||
if self._thermostat.schedule_mode in (SCHEDULE_RUN, SCHEDULE_TEMPORARY_HOLD):
|
||||
target_schedule_mode = SCHEDULE_TEMPORARY_HOLD
|
||||
target_schedule_mode = SCHEDULE_TEMPORARY_HOLD
|
||||
if preset_mode:
|
||||
target_schedule_mode = PRESET_MODE_TO_SCHEDULE_MODE_MAP.get(
|
||||
preset_mode, SCHEDULE_RUN
|
||||
)
|
||||
elif self._schedule_mode == SCHEDULE_HOLD or (
|
||||
hvac_mode and hvac_mode == HVAC_MODE_HEAT
|
||||
):
|
||||
target_schedule_mode = SCHEDULE_HOLD
|
||||
|
||||
_LOGGER.debug(
|
||||
"Setting NuHeat thermostat temperature to %s %s and schedule mode: %s",
|
||||
@@ -204,15 +217,32 @@ class NuHeatThermostat(ClimateDevice):
|
||||
self.temperature_unit,
|
||||
target_schedule_mode,
|
||||
)
|
||||
# If we do not send schedule_mode we always get
|
||||
# SCHEDULE_HOLD
|
||||
self._thermostat.set_target_temperature(target_temp, target_schedule_mode)
|
||||
|
||||
self._thermostat.set_target_temperature(
|
||||
target_temperature, target_schedule_mode
|
||||
)
|
||||
self._schedule_mode = target_schedule_mode
|
||||
self._target_temperature = target_temperature
|
||||
self._schedule_update()
|
||||
|
||||
def _schedule_update(self):
|
||||
if not self.hass:
|
||||
return
|
||||
|
||||
# Update the new state
|
||||
self.schedule_update_ha_state(False)
|
||||
|
||||
# nuheat has a delay switching state
|
||||
# so we schedule a poll of the api
|
||||
# in the future to make sure the change actually
|
||||
# took effect
|
||||
event_helper.call_later(
|
||||
self.hass, NUHEAT_API_STATE_SHIFT_DELAY, self._schedule_force_refresh
|
||||
)
|
||||
|
||||
def _schedule_force_refresh(self, _):
|
||||
self._force_update = True
|
||||
if self.hass:
|
||||
self.schedule_update_ha_state(True)
|
||||
self.schedule_update_ha_state(True)
|
||||
|
||||
def update(self):
|
||||
"""Get the latest state from the thermostat."""
|
||||
@@ -226,6 +256,8 @@ class NuHeatThermostat(ClimateDevice):
|
||||
def _throttled_update(self, **kwargs):
|
||||
"""Get the latest state from the thermostat with a throttle."""
|
||||
self._thermostat.get_data()
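# Cache the schedule mode and raw target temperature so the property getters avoid extra API calls.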
|
||||
self._schedule_mode = self._thermostat.schedule_mode
|
||||
self._target_temperature = self._thermostat.target_temperature
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
@@ -233,5 +265,6 @@ class NuHeatThermostat(ClimateDevice):
|
||||
return {
|
||||
"identifiers": {(DOMAIN, self._thermostat.serial_number)},
|
||||
"name": self._thermostat.room,
|
||||
"model": "nVent Signature",
|
||||
"manufacturer": MANUFACTURER,
|
||||
}
|
||||
|
@@ -7,3 +7,5 @@ PLATFORMS = ["climate"]
|
||||
CONF_SERIAL_NUMBER = "serial_number"
|
||||
|
||||
MANUFACTURER = "NuHeat"
|
||||
|
||||
NUHEAT_API_STATE_SHIFT_DELAY = 4
|
||||
|
@@ -3,6 +3,7 @@ import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import requests
|
||||
from tesla_powerwall import (
|
||||
ApiError,
|
||||
MetersResponse,
|
||||
@@ -21,12 +22,15 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
POWERWALL_API_CHARGE,
|
||||
POWERWALL_API_DEVICE_TYPE,
|
||||
POWERWALL_API_GRID_STATUS,
|
||||
POWERWALL_API_METERS,
|
||||
POWERWALL_API_SITE_INFO,
|
||||
POWERWALL_API_SITEMASTER,
|
||||
POWERWALL_API_STATUS,
|
||||
POWERWALL_COORDINATOR,
|
||||
POWERWALL_HTTP_SESSION,
|
||||
POWERWALL_OBJECT,
|
||||
POWERWALL_SITE_INFO,
|
||||
UPDATE_INTERVAL,
|
||||
)
|
||||
|
||||
@@ -62,10 +66,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
|
||||
entry_id = entry.entry_id
|
||||
|
||||
hass.data[DOMAIN].setdefault(entry_id, {})
|
||||
power_wall = PowerWall(entry.data[CONF_IP_ADDRESS])
|
||||
http_session = requests.Session()
|
||||
power_wall = PowerWall(entry.data[CONF_IP_ADDRESS], http_session=http_session)
|
||||
try:
|
||||
site_info = await hass.async_add_executor_job(call_site_info, power_wall)
|
||||
powerwall_data = await hass.async_add_executor_job(call_base_info, power_wall)
|
||||
except (PowerWallUnreachableError, ApiError, ConnectionError):
|
||||
http_session.close()
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
async def async_update_data():
|
||||
@@ -80,11 +86,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
|
||||
update_interval=timedelta(seconds=UPDATE_INTERVAL),
|
||||
)
|
||||
|
||||
hass.data[DOMAIN][entry.entry_id] = {
|
||||
POWERWALL_OBJECT: power_wall,
|
||||
POWERWALL_COORDINATOR: coordinator,
|
||||
POWERWALL_SITE_INFO: site_info,
|
||||
}
|
||||
hass.data[DOMAIN][entry.entry_id] = powerwall_data
|
||||
hass.data[DOMAIN][entry.entry_id].update(
|
||||
{
|
||||
POWERWALL_OBJECT: power_wall,
|
||||
POWERWALL_COORDINATOR: coordinator,
|
||||
POWERWALL_HTTP_SESSION: http_session,
|
||||
}
|
||||
)
|
||||
|
||||
await coordinator.async_refresh()
|
||||
|
||||
@@ -96,9 +105,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
|
||||
return True
|
||||
|
||||
|
||||
def call_site_info(power_wall):
|
||||
"""Wrap site_info to be a callable."""
|
||||
return power_wall.site_info
|
||||
def call_base_info(power_wall):
|
||||
"""Wrap powerwall properties to be a callable."""
|
||||
return {
|
||||
POWERWALL_API_SITE_INFO: power_wall.site_info,
|
||||
POWERWALL_API_STATUS: power_wall.status,
|
||||
POWERWALL_API_DEVICE_TYPE: power_wall.device_type,
|
||||
}
|
||||
|
||||
|
||||
def _fetch_powerwall_data(power_wall):
|
||||
@@ -124,6 +137,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
hass.data[DOMAIN][entry.entry_id][POWERWALL_HTTP_SESSION].close()
|
||||
|
||||
if unload_ok:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
||||
|
@@ -12,13 +12,15 @@ from .const import (
|
||||
ATTR_NOMINAL_SYSTEM_POWER,
|
||||
ATTR_REGION,
|
||||
DOMAIN,
|
||||
POWERWALL_API_DEVICE_TYPE,
|
||||
POWERWALL_API_GRID_STATUS,
|
||||
POWERWALL_API_SITE_INFO,
|
||||
POWERWALL_API_SITEMASTER,
|
||||
POWERWALL_API_STATUS,
|
||||
POWERWALL_CONNECTED_KEY,
|
||||
POWERWALL_COORDINATOR,
|
||||
POWERWALL_GRID_ONLINE,
|
||||
POWERWALL_RUNNING_KEY,
|
||||
POWERWALL_SITE_INFO,
|
||||
SITE_INFO_GRID_CODE,
|
||||
SITE_INFO_NOMINAL_SYSTEM_POWER_KW,
|
||||
SITE_INFO_REGION,
|
||||
@@ -33,7 +35,9 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
powerwall_data = hass.data[DOMAIN][config_entry.entry_id]
|
||||
|
||||
coordinator = powerwall_data[POWERWALL_COORDINATOR]
|
||||
site_info = powerwall_data[POWERWALL_SITE_INFO]
|
||||
site_info = powerwall_data[POWERWALL_API_SITE_INFO]
|
||||
device_type = powerwall_data[POWERWALL_API_DEVICE_TYPE]
|
||||
status = powerwall_data[POWERWALL_API_STATUS]
|
||||
|
||||
entities = []
|
||||
for sensor_class in (
|
||||
@@ -41,7 +45,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
PowerWallGridStatusSensor,
|
||||
PowerWallConnectedSensor,
|
||||
):
|
||||
entities.append(sensor_class(coordinator, site_info))
|
||||
entities.append(sensor_class(coordinator, site_info, status, device_type))
|
||||
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
@@ -7,7 +7,6 @@ import voluptuous as vol
|
||||
from homeassistant import config_entries, core, exceptions
|
||||
from homeassistant.const import CONF_IP_ADDRESS
|
||||
|
||||
from . import call_site_info
|
||||
from .const import DOMAIN # pylint:disable=unused-import
|
||||
from .const import POWERWALL_SITE_NAME
|
||||
|
||||
@@ -33,6 +32,11 @@ async def validate_input(hass: core.HomeAssistant, data):
|
||||
return {"title": site_info[POWERWALL_SITE_NAME]}
|
||||
|
||||
|
||||
def call_site_info(power_wall):
|
||||
"""Wrap site_info to be a callable."""
|
||||
return power_wall.site_info
|
||||
|
||||
|
||||
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Tesla Powerwall."""
|
||||
|
||||
|
@@ -6,7 +6,6 @@ POWERWALL_SITE_NAME = "site_name"
|
||||
|
||||
POWERWALL_OBJECT = "powerwall"
|
||||
POWERWALL_COORDINATOR = "coordinator"
|
||||
POWERWALL_SITE_INFO = "site_info"
|
||||
|
||||
UPDATE_INTERVAL = 60
|
||||
|
||||
@@ -24,12 +23,21 @@ SITE_INFO_NOMINAL_SYSTEM_POWER_KW = "nominal_system_power_kW"
|
||||
SITE_INFO_NOMINAL_SYSTEM_ENERGY_KWH = "nominal_system_energy_kWh"
|
||||
SITE_INFO_REGION = "region"
|
||||
|
||||
DEVICE_TYPE_DEVICE_TYPE = "device_type"
|
||||
|
||||
STATUS_VERSION = "version"
|
||||
|
||||
POWERWALL_SITE_NAME = "site_name"
|
||||
|
||||
POWERWALL_API_METERS = "meters"
|
||||
POWERWALL_API_CHARGE = "charge"
|
||||
POWERWALL_API_GRID_STATUS = "grid_status"
|
||||
POWERWALL_API_SITEMASTER = "sitemaster"
|
||||
POWERWALL_API_STATUS = "status"
|
||||
POWERWALL_API_DEVICE_TYPE = "device_type"
|
||||
POWERWALL_API_SITE_INFO = "site_info"
|
||||
|
||||
POWERWALL_HTTP_SESSION = "http_session"
|
||||
|
||||
POWERWALL_GRID_ONLINE = "SystemGridConnected"
|
||||
POWERWALL_CONNECTED_KEY = "connected_to_tesla"
|
||||
|
@@ -3,6 +3,7 @@
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import (
|
||||
DEVICE_TYPE_DEVICE_TYPE,
|
||||
DOMAIN,
|
||||
MANUFACTURER,
|
||||
MODEL,
|
||||
@@ -10,17 +11,20 @@ from .const import (
|
||||
SITE_INFO_GRID_CODE,
|
||||
SITE_INFO_NOMINAL_SYSTEM_ENERGY_KWH,
|
||||
SITE_INFO_UTILITY,
|
||||
STATUS_VERSION,
|
||||
)
|
||||
|
||||
|
||||
class PowerWallEntity(Entity):
|
||||
"""Base class for powerwall entities."""
|
||||
|
||||
def __init__(self, coordinator, site_info):
|
||||
def __init__(self, coordinator, site_info, status, device_type):
|
||||
"""Initialize the sensor."""
|
||||
super().__init__()
|
||||
self._coordinator = coordinator
|
||||
self._site_info = site_info
|
||||
self._device_type = device_type.get(DEVICE_TYPE_DEVICE_TYPE)
|
||||
self._version = status.get(STATUS_VERSION)
|
||||
# This group of properties will be unique to the site
|
||||
unique_group = (
|
||||
site_info[SITE_INFO_UTILITY],
|
||||
@@ -32,12 +36,18 @@ class PowerWallEntity(Entity):
|
||||
@property
|
||||
def device_info(self):
|
||||
"""Powerwall device info."""
|
||||
return {
|
||||
device_info = {
|
||||
"identifiers": {(DOMAIN, self.base_unique_id)},
|
||||
"name": self._site_info[POWERWALL_SITE_NAME],
|
||||
"manufacturer": MANUFACTURER,
|
||||
"model": MODEL,
|
||||
}
|
||||
model = MODEL
|
||||
if self._device_type:
|
||||
model += f" ({self._device_type})"
|
||||
device_info["model"] = model
|
||||
if self._version:
|
||||
device_info["sw_version"] = self._version
|
||||
return device_info
|
||||
|
||||
@property
|
||||
def available(self):
|
||||
|
@@ -4,7 +4,7 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/powerwall",
|
||||
"requirements": [
|
||||
"tesla-powerwall==0.1.1"
|
||||
"tesla-powerwall==0.1.3"
|
||||
],
|
||||
"ssdp": [],
|
||||
"zeroconf": [],
|
||||
@@ -13,4 +13,4 @@
|
||||
"codeowners": [
|
||||
"@bdraco"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@@ -15,9 +15,11 @@ from .const import (
|
||||
ATTR_INSTANT_AVERAGE_VOLTAGE,
|
||||
DOMAIN,
|
||||
POWERWALL_API_CHARGE,
|
||||
POWERWALL_API_DEVICE_TYPE,
|
||||
POWERWALL_API_METERS,
|
||||
POWERWALL_API_SITE_INFO,
|
||||
POWERWALL_API_STATUS,
|
||||
POWERWALL_COORDINATOR,
|
||||
POWERWALL_SITE_INFO,
|
||||
)
|
||||
from .entity import PowerWallEntity
|
||||
|
||||
@@ -30,13 +32,17 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
_LOGGER.debug("Powerwall_data: %s", powerwall_data)
|
||||
|
||||
coordinator = powerwall_data[POWERWALL_COORDINATOR]
|
||||
site_info = powerwall_data[POWERWALL_SITE_INFO]
|
||||
site_info = powerwall_data[POWERWALL_API_SITE_INFO]
|
||||
device_type = powerwall_data[POWERWALL_API_DEVICE_TYPE]
|
||||
status = powerwall_data[POWERWALL_API_STATUS]
|
||||
|
||||
entities = []
|
||||
for meter in coordinator.data[POWERWALL_API_METERS]:
|
||||
entities.append(PowerWallEnergySensor(meter, coordinator, site_info))
|
||||
entities.append(
|
||||
PowerWallEnergySensor(meter, coordinator, site_info, status, device_type)
|
||||
)
|
||||
|
||||
entities.append(PowerWallChargeSensor(coordinator, site_info))
|
||||
entities.append(PowerWallChargeSensor(coordinator, site_info, status, device_type))
|
||||
|
||||
async_add_entities(entities, True)
|
||||
|
||||
@@ -73,9 +79,9 @@ class PowerWallChargeSensor(PowerWallEntity):
|
||||
class PowerWallEnergySensor(PowerWallEntity):
|
||||
"""Representation of an Powerwall Energy sensor."""
|
||||
|
||||
def __init__(self, meter, coordinator, site_info):
|
||||
def __init__(self, meter, coordinator, site_info, status, device_type):
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, site_info)
|
||||
super().__init__(coordinator, site_info, status, device_type)
|
||||
self._meter = meter
|
||||
|
||||
@property
|
||||
|
@@ -2,26 +2,20 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import secrets
|
||||
from typing import Optional
|
||||
|
||||
from aiohttp import web
|
||||
from rachiopy import Rachio
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, EVENT_HOMEASSISTANT_STOP, URL_API
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, device_registry
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import (
|
||||
CONF_CUSTOM_URL,
|
||||
CONF_MANUAL_RUN_MINS,
|
||||
DEFAULT_MANUAL_RUN_MINS,
|
||||
DEFAULT_NAME,
|
||||
DOMAIN,
|
||||
KEY_DEVICES,
|
||||
KEY_ENABLED,
|
||||
@@ -36,8 +30,11 @@ from .const import (
|
||||
KEY_TYPE,
|
||||
KEY_USERNAME,
|
||||
KEY_ZONES,
|
||||
|
||||
RACHIO_API_EXCEPTIONS,
|
||||
)
|
||||
from .device import RachioPerson
|
||||
from .webhooks import WEBHOOK_PATH, RachioWebhookView
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@@ -2,18 +2,23 @@
|
||||
from abc import abstractmethod
|
||||
import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.binary_sensor import (
|
||||
DEVICE_CLASS_CONNECTIVITY,
|
||||
BinarySensorDevice,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
from . import (
|
||||
from .const import (
|
||||
DOMAIN as DOMAIN_RACHIO,
|
||||
KEY_DEVICE_ID,
|
||||
KEY_STATUS,
|
||||
KEY_SUBTYPE,
|
||||
SIGNAL_RACHIO_CONTROLLER_UPDATE,
|
||||
STATUS_OFFLINE,
|
||||
STATUS_ONLINE,
|
||||
SUBTYPE_OFFLINE,
|
||||
SUBTYPE_ONLINE,
|
||||
RachioDeviceInfoProvider,
|
||||
)
|
||||
from .const import DOMAIN as DOMAIN_RACHIO, KEY_DEVICE_ID, KEY_STATUS, KEY_SUBTYPE
|
||||
from .entity import RachioDevice
|
||||
from .webhooks import SUBTYPE_OFFLINE, SUBTYPE_ONLINE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -32,23 +37,18 @@ def _create_entities(hass, config_entry):
|
||||
return entities
|
||||
|
||||
|
||||
class RachioControllerBinarySensor(RachioDeviceInfoProvider, BinarySensorDevice):
|
||||
class RachioControllerBinarySensor(RachioDevice, BinarySensorDevice):
|
||||
"""Represent a binary sensor that reflects a Rachio state."""
|
||||
|
||||
def __init__(self, controller, poll=True):
|
||||
"""Set up a new Rachio controller binary sensor."""
|
||||
super().__init__(controller)
|
||||
|
||||
self._undo_dispatcher = None
|
||||
if poll:
|
||||
self._state = self._poll_update()
|
||||
else:
|
||||
self._state = None
|
||||
|
||||
@property
|
||||
def should_poll(self) -> bool:
|
||||
"""Declare that this entity pushes its state to HA."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return whether the sensor has a 'true' value."""
|
||||
@@ -66,19 +66,22 @@ class RachioControllerBinarySensor(RachioDeviceInfoProvider, BinarySensorDevice)
|
||||
@abstractmethod
|
||||
def _poll_update(self, data=None) -> bool:
|
||||
"""Request the state from the API."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _handle_update(self, *args, **kwargs) -> None:
|
||||
"""Handle an update to the state of this sensor."""
|
||||
pass
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Subscribe to updates."""
|
||||
async_dispatcher_connect(
|
||||
self._undo_dispatcher = async_dispatcher_connect(
|
||||
self.hass, SIGNAL_RACHIO_CONTROLLER_UPDATE, self._handle_any_update
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self):
|
||||
"""Unsubscribe from updates."""
|
||||
if self._undo_dispatcher:
|
||||
self._undo_dispatcher()
|
||||
|
||||
|
||||
class RachioControllerOnlineBinarySensor(RachioControllerBinarySensor):
|
||||
"""Represent a binary sensor that reflects if the controller is online."""
|
||||
@@ -101,7 +104,7 @@ class RachioControllerOnlineBinarySensor(RachioControllerBinarySensor):
|
||||
@property
|
||||
def device_class(self) -> str:
|
||||
"""Return the class of this device, from component DEVICE_CLASSES."""
|
||||
return "connectivity"
|
||||
return DEVICE_CLASS_CONNECTIVITY
|
||||
|
||||
@property
|
||||
def icon(self) -> str:
|
||||
|
@@ -36,6 +36,11 @@ KEY_ZONE_NUMBER = "zoneNumber"
|
||||
KEY_ZONES = "zones"
|
||||
KEY_SCHEDULES = "scheduleRules"
|
||||
KEY_SCHEDULE_ID = "scheduleId"
|
||||
KEY_CUSTOM_SHADE = "customShade"
|
||||
KEY_CUSTOM_CROP = "customCrop"
|
||||
|
||||
ATTR_ZONE_TYPE = "type"
|
||||
ATTR_ZONE_SHADE = "shade"
|
||||
|
||||
# Yes we really do get all these exceptions (hopefully rachiopy switches to requests)
|
||||
RACHIO_API_EXCEPTIONS = (
|
||||
@@ -44,3 +49,11 @@ RACHIO_API_EXCEPTIONS = (
|
||||
OSError,
|
||||
AssertionError,
|
||||
)
|
||||
|
||||
STATUS_ONLINE = "ONLINE"
|
||||
STATUS_OFFLINE = "OFFLINE"
|
||||
|
||||
SIGNAL_RACHIO_UPDATE = DOMAIN + "_update"
|
||||
SIGNAL_RACHIO_CONTROLLER_UPDATE = SIGNAL_RACHIO_UPDATE + "_controller"
|
||||
SIGNAL_RACHIO_ZONE_UPDATE = SIGNAL_RACHIO_UPDATE + "_zone"
|
||||
SIGNAL_RACHIO_SCHEDULE_UPDATE = SIGNAL_RACHIO_UPDATE + "_schedule"
|
||||
|
180
homeassistant/components/rachio/device.py
Normal file
@@ -0,0 +1,180 @@
|
||||
"""Adapter to wrap the rachiopy api for home assistant."""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
||||
|
||||
from .const import (
|
||||
KEY_DEVICES,
|
||||
KEY_ENABLED,
|
||||
KEY_EXTERNAL_ID,
|
||||
KEY_ID,
|
||||
KEY_MAC_ADDRESS,
|
||||
KEY_MODEL,
|
||||
KEY_NAME,
|
||||
KEY_SERIAL_NUMBER,
|
||||
KEY_STATUS,
|
||||
KEY_USERNAME,
|
||||
KEY_ZONES,
|
||||
)
|
||||
from .webhooks import LISTEN_EVENT_TYPES, WEBHOOK_CONST_ID
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RachioPerson:
|
||||
"""Represent a Rachio user."""
|
||||
|
||||
def __init__(self, rachio, config_entry):
|
||||
"""Create an object from the provided API instance."""
|
||||
# Use API token to get user ID
|
||||
self.rachio = rachio
|
||||
self.config_entry = config_entry
|
||||
self.username = None
|
||||
self._id = None
|
||||
self._controllers = []
|
||||
|
||||
def setup(self, hass):
|
||||
"""Rachio device setup."""
|
||||
response = self.rachio.person.getInfo()
|
||||
assert int(response[0][KEY_STATUS]) == 200, "API key error"
|
||||
self._id = response[1][KEY_ID]
|
||||
|
||||
# Use user ID to get user data
|
||||
data = self.rachio.person.get(self._id)
|
||||
assert int(data[0][KEY_STATUS]) == 200, "User ID error"
|
||||
self.username = data[1][KEY_USERNAME]
|
||||
devices = data[1][KEY_DEVICES]
|
||||
for controller in devices:
|
||||
webhooks = self.rachio.notification.getDeviceWebhook(controller[KEY_ID])[1]
|
||||
# The API does not provide a way to tell if a controller is shared
|
||||
# or if they are the owner. To work around this problem we fetch the webhooks
|
||||
# before we set up the device so we can skip it instead of failing.
|
||||
# webhooks are normally a list, however if there is an error
|
||||
# rachio hands us back a dict
|
||||
if isinstance(webhooks, dict):
|
||||
_LOGGER.error(
|
||||
"Failed to add rachio controller '%s' because of an error: %s",
|
||||
controller[KEY_NAME],
|
||||
webhooks.get("error", "Unknown Error"),
|
||||
)
|
||||
continue
|
||||
|
||||
rachio_iro = RachioIro(hass, self.rachio, controller, webhooks)
|
||||
rachio_iro.setup()
|
||||
self._controllers.append(rachio_iro)
|
||||
_LOGGER.info('Using Rachio API as user "%s"', self.username)
|
||||
|
||||
@property
|
||||
def user_id(self) -> str:
|
||||
"""Get the user ID as defined by the Rachio API."""
|
||||
return self._id
|
||||
|
||||
@property
|
||||
def controllers(self) -> list:
|
||||
"""Get a list of controllers managed by this account."""
|
||||
return self._controllers
|
||||
|
||||
|
||||
class RachioIro:
|
||||
"""Represent a Rachio Iro."""
|
||||
|
||||
def __init__(self, hass, rachio, data, webhooks):
|
||||
"""Initialize a Rachio device."""
|
||||
self.hass = hass
|
||||
self.rachio = rachio
|
||||
self._id = data[KEY_ID]
|
||||
self.name = data[KEY_NAME]
|
||||
self.serial_number = data[KEY_SERIAL_NUMBER]
|
||||
self.mac_address = data[KEY_MAC_ADDRESS]
|
||||
self.model = data[KEY_MODEL]
|
||||
self._zones = data[KEY_ZONES]
|
||||
self._init_data = data
|
||||
self._webhooks = webhooks
|
||||
_LOGGER.debug('%s has ID "%s"', str(self), self.controller_id)
|
||||
|
||||
def setup(self):
|
||||
"""Rachio Iro setup for webhooks."""
|
||||
# Listen for all updates
|
||||
self._init_webhooks()
|
||||
|
||||
def _init_webhooks(self) -> None:
|
||||
"""Start getting updates from the Rachio API."""
|
||||
current_webhook_id = None
|
||||
|
||||
# First delete any old webhooks that may have stuck around
|
||||
def _deinit_webhooks(_) -> None:
|
||||
"""Stop getting updates from the Rachio API."""
|
||||
if not self._webhooks:
|
||||
# We fetched the webhooks when the device was created; however, if _init_webhooks runs
|
||||
# again, we need to fetch them again
|
||||
self._webhooks = self.rachio.notification.getDeviceWebhook(
|
||||
self.controller_id
|
||||
)[1]
|
||||
for webhook in self._webhooks:
|
||||
if (
|
||||
webhook[KEY_EXTERNAL_ID].startswith(WEBHOOK_CONST_ID)
|
||||
or webhook[KEY_ID] == current_webhook_id
|
||||
):
|
||||
self.rachio.notification.deleteWebhook(webhook[KEY_ID])
|
||||
self._webhooks = None
|
||||
|
||||
_deinit_webhooks(None)
|
||||
|
||||
# Choose which events to listen for and get their IDs
|
||||
event_types = []
|
||||
for event_type in self.rachio.notification.getWebhookEventType()[1]:
|
||||
if event_type[KEY_NAME] in LISTEN_EVENT_TYPES:
|
||||
event_types.append({"id": event_type[KEY_ID]})
|
||||
|
||||
# Register to listen to these events from the device
|
||||
url = self.rachio.webhook_url
|
||||
auth = WEBHOOK_CONST_ID + self.rachio.webhook_auth
|
||||
new_webhook = self.rachio.notification.postWebhook(
|
||||
self.controller_id, auth, url, event_types
|
||||
)
|
||||
# Save ID for deletion at shutdown
|
||||
current_webhook_id = new_webhook[1][KEY_ID]
|
||||
self.hass.bus.listen(EVENT_HOMEASSISTANT_STOP, _deinit_webhooks)
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Display the controller as a string."""
|
||||
return f'Rachio controller "{self.name}"'
|
||||
|
||||
@property
|
||||
def controller_id(self) -> str:
|
||||
"""Return the Rachio API controller ID."""
|
||||
return self._id
|
||||
|
||||
@property
|
||||
def current_schedule(self) -> str:
|
||||
"""Return the schedule that the device is running right now."""
|
||||
return self.rachio.device.getCurrentSchedule(self.controller_id)[1]
|
||||
|
||||
@property
|
||||
def init_data(self) -> dict:
|
||||
"""Return the information used to set up the controller."""
|
||||
return self._init_data
|
||||
|
||||
def list_zones(self, include_disabled=False) -> list:
|
||||
"""Return a list of the zone dicts connected to the device."""
|
||||
# All zones
|
||||
if include_disabled:
|
||||
return self._zones
|
||||
|
||||
# Only enabled zones
|
||||
return [z for z in self._zones if z[KEY_ENABLED]]
|
||||
|
||||
def get_zone(self, zone_id) -> Optional[dict]:
|
||||
"""Return the zone with the given ID."""
|
||||
for zone in self.list_zones(include_disabled=True):
|
||||
if zone[KEY_ID] == zone_id:
|
||||
return zone
|
||||
|
||||
return None
|
||||
|
||||
def stop_watering(self) -> None:
|
||||
"""Stop watering all zones connected to this controller."""
|
||||
self.rachio.device.stopWater(self.controller_id)
|
||||
_LOGGER.info("Stopped watering of all zones on %s", str(self))
|
33
homeassistant/components/rachio/entity.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""Adapter to wrap the rachiopy api for home assistant."""
|
||||
|
||||
from homeassistant.helpers import device_registry
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import DEFAULT_NAME, DOMAIN
|
||||
|
||||
|
||||
class RachioDevice(Entity):
|
||||
"""Base class for rachio devices."""
|
||||
|
||||
def __init__(self, controller):
|
||||
"""Initialize a Rachio device."""
|
||||
super().__init__()
|
||||
self._controller = controller
|
||||
|
||||
@property
|
||||
def should_poll(self) -> bool:
|
||||
"""Declare that this entity pushes its state to HA."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
"""Return the device_info of the device."""
|
||||
return {
|
||||
"identifiers": {(DOMAIN, self._controller.serial_number,)},
|
||||
"connections": {
|
||||
(device_registry.CONNECTION_NETWORK_MAC, self._controller.mac_address,)
|
||||
},
|
||||
"name": self._controller.name,
|
||||
"model": self._controller.model,
|
||||
"manufacturer": DEFAULT_NAME,
|
||||
}
|
@@ -6,6 +6,7 @@ import logging
|
||||
from homeassistant.components.switch import SwitchDevice
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
|
||||
from . import (
|
||||
SIGNAL_RACHIO_CONTROLLER_UPDATE,
|
||||
SIGNAL_RACHIO_SCHEDULE_UPDATE,
|
||||
@@ -20,10 +21,15 @@ from . import (
|
||||
SUBTYPE_ZONE_STOPPED,
|
||||
RachioDeviceInfoProvider,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
ATTR_ZONE_SHADE,
|
||||
ATTR_ZONE_TYPE,
|
||||
CONF_MANUAL_RUN_MINS,
|
||||
DEFAULT_MANUAL_RUN_MINS,
|
||||
DOMAIN as DOMAIN_RACHIO,
|
||||
KEY_CUSTOM_CROP,
|
||||
KEY_CUSTOM_SHADE,
|
||||
KEY_DEVICE_ID,
|
||||
KEY_DURATION,
|
||||
KEY_ENABLED,
|
||||
@@ -36,6 +42,16 @@ from .const import (
|
||||
KEY_SUMMARY,
|
||||
KEY_ZONE_ID,
|
||||
KEY_ZONE_NUMBER,
|
||||
SIGNAL_RACHIO_CONTROLLER_UPDATE,
|
||||
SIGNAL_RACHIO_ZONE_UPDATE,
|
||||
)
|
||||
from .entity import RachioDevice
|
||||
from .webhooks import (
|
||||
SUBTYPE_SLEEP_MODE_OFF,
|
||||
SUBTYPE_SLEEP_MODE_ON,
|
||||
SUBTYPE_ZONE_COMPLETED,
|
||||
SUBTYPE_ZONE_STARTED,
|
||||
SUBTYPE_ZONE_STOPPED,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -74,7 +90,7 @@ def _create_entities(hass, config_entry):
    return entities


class RachioSwitch(RachioDeviceInfoProvider, SwitchDevice):
class RachioSwitch(RachioDevice, SwitchDevice):
    """Represent a Rachio state that can be toggled."""

    def __init__(self, controller, poll=True):
@@ -86,11 +102,6 @@ class RachioSwitch(RachioDeviceInfoProvider, SwitchDevice):
        else:
            self._state = None

    @property
    def should_poll(self) -> bool:
        """Declare that this entity pushes its state to HA."""
        return False

    @property
    def name(self) -> str:
        """Get a name for this switch."""
@@ -104,7 +115,6 @@ class RachioSwitch(RachioDeviceInfoProvider, SwitchDevice):
    @abstractmethod
    def _poll_update(self, data=None) -> bool:
        """Poll the API."""
        pass

    def _handle_any_update(self, *args, **kwargs) -> None:
        """Determine whether an update event applies to this device."""
@@ -118,7 +128,6 @@ class RachioSwitch(RachioDeviceInfoProvider, SwitchDevice):
    @abstractmethod
    def _handle_update(self, *args, **kwargs) -> None:
        """Handle incoming webhook data."""
        pass


class RachioStandbySwitch(RachioSwitch):
@@ -181,15 +190,19 @@ class RachioZone(RachioSwitch):
    def __init__(self, person, controller, data, current_schedule):
        """Initialize a new Rachio Zone."""
        self._id = data[KEY_ID]
        _LOGGER.debug("zone_data: %s", data)
        self._zone_name = data[KEY_NAME]
        self._zone_number = data[KEY_ZONE_NUMBER]
        self._zone_enabled = data[KEY_ENABLED]
        self._entity_picture = data.get(KEY_IMAGE_URL)
        self._person = person
        self._shade_type = data.get(KEY_CUSTOM_SHADE, {}).get(KEY_NAME)
        self._zone_type = data.get(KEY_CUSTOM_CROP, {}).get(KEY_NAME)
        self._summary = str()
        self._current_schedule = current_schedule
        super().__init__(controller, poll=False)
        self._state = self.zone_id == self._current_schedule.get(KEY_ZONE_ID)
        self._undo_dispatcher = None

    def __str__(self):
        """Display the zone as a string."""
@@ -228,7 +241,12 @@ class RachioZone(RachioSwitch):
    @property
    def state_attributes(self) -> dict:
        """Return the optional state attributes."""
        return {ATTR_ZONE_NUMBER: self._zone_number, ATTR_ZONE_SUMMARY: self._summary}
        props = {ATTR_ZONE_NUMBER: self._zone_number, ATTR_ZONE_SUMMARY: self._summary}
        if self._shade_type:
            props[ATTR_ZONE_SHADE] = self._shade_type
        if self._zone_type:
            props[ATTR_ZONE_TYPE] = self._zone_type
        return props

    def turn_on(self, **kwargs) -> None:
        """Start watering this zone."""
@@ -274,7 +292,7 @@ class RachioZone(RachioSwitch):

    async def async_added_to_hass(self):
        """Subscribe to updates."""
        async_dispatcher_connect(
        self._undo_dispatcher = async_dispatcher_connect(
            self.hass, SIGNAL_RACHIO_ZONE_UPDATE, self._handle_update
        )

@@ -370,3 +388,8 @@ class RachioSchedule(RachioSwitch):
        async_dispatcher_connect(
            self.hass, SIGNAL_RACHIO_SCHEDULE_UPDATE, self._handle_update
        )

    async def async_will_remove_from_hass(self):
        """Unsubscribe from updates."""
        if self._undo_dispatcher:
            self._undo_dispatcher()
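Storing the return value of async_dispatcher_connect is what makes the new cleanup step possible: the helper returns an unsubscribe callable. A minimal stand-alone sketch of the subscribe-on-add / unsubscribe-on-remove pattern used above, with a placeholder signal name:

from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity

SIGNAL_EXAMPLE_UPDATE = "example_update"  # placeholder signal name


class PushUpdatedEntity(Entity):
    """Entity updated by dispatcher signals instead of polling."""

    def __init__(self):
        self._undo_dispatcher = None

    async def async_added_to_hass(self):
        # Keep the unsubscribe callable so the listener can be removed later.
        self._undo_dispatcher = async_dispatcher_connect(
            self.hass, SIGNAL_EXAMPLE_UPDATE, self._handle_update
        )

    async def async_will_remove_from_hass(self):
        # Without this, the callback would keep firing after removal.
        if self._undo_dispatcher:
            self._undo_dispatcher()

    def _handle_update(self, data):
        self.schedule_update_ha_state()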
homeassistant/components/rachio/webhooks.py (new file, 96 lines)
@@ -0,0 +1,96 @@
"""Webhooks used by rachio."""

import logging

from aiohttp import web

from homeassistant.components.http import HomeAssistantView
from homeassistant.const import URL_API
from homeassistant.helpers.dispatcher import async_dispatcher_send

from .const import (
    DOMAIN,
    KEY_EXTERNAL_ID,
    KEY_TYPE,
    SIGNAL_RACHIO_CONTROLLER_UPDATE,
    SIGNAL_RACHIO_SCHEDULE_UPDATE,
    SIGNAL_RACHIO_ZONE_UPDATE,
)

# Device webhook values
TYPE_CONTROLLER_STATUS = "DEVICE_STATUS"
SUBTYPE_OFFLINE = "OFFLINE"
SUBTYPE_ONLINE = "ONLINE"
SUBTYPE_OFFLINE_NOTIFICATION = "OFFLINE_NOTIFICATION"
SUBTYPE_COLD_REBOOT = "COLD_REBOOT"
SUBTYPE_SLEEP_MODE_ON = "SLEEP_MODE_ON"
SUBTYPE_SLEEP_MODE_OFF = "SLEEP_MODE_OFF"
SUBTYPE_BROWNOUT_VALVE = "BROWNOUT_VALVE"
SUBTYPE_RAIN_SENSOR_DETECTION_ON = "RAIN_SENSOR_DETECTION_ON"
SUBTYPE_RAIN_SENSOR_DETECTION_OFF = "RAIN_SENSOR_DETECTION_OFF"
SUBTYPE_RAIN_DELAY_ON = "RAIN_DELAY_ON"
SUBTYPE_RAIN_DELAY_OFF = "RAIN_DELAY_OFF"

# Schedule webhook values
TYPE_SCHEDULE_STATUS = "SCHEDULE_STATUS"
SUBTYPE_SCHEDULE_STARTED = "SCHEDULE_STARTED"
SUBTYPE_SCHEDULE_STOPPED = "SCHEDULE_STOPPED"
SUBTYPE_SCHEDULE_COMPLETED = "SCHEDULE_COMPLETED"
SUBTYPE_WEATHER_NO_SKIP = "WEATHER_INTELLIGENCE_NO_SKIP"
SUBTYPE_WEATHER_SKIP = "WEATHER_INTELLIGENCE_SKIP"
SUBTYPE_WEATHER_CLIMATE_SKIP = "WEATHER_INTELLIGENCE_CLIMATE_SKIP"
SUBTYPE_WEATHER_FREEZE = "WEATHER_INTELLIGENCE_FREEZE"

# Zone webhook values
TYPE_ZONE_STATUS = "ZONE_STATUS"
SUBTYPE_ZONE_STARTED = "ZONE_STARTED"
SUBTYPE_ZONE_STOPPED = "ZONE_STOPPED"
SUBTYPE_ZONE_COMPLETED = "ZONE_COMPLETED"
SUBTYPE_ZONE_CYCLING = "ZONE_CYCLING"
SUBTYPE_ZONE_CYCLING_COMPLETED = "ZONE_CYCLING_COMPLETED"

# Webhook callbacks
LISTEN_EVENT_TYPES = ["DEVICE_STATUS_EVENT", "ZONE_STATUS_EVENT"]
WEBHOOK_CONST_ID = "homeassistant.rachio:"
WEBHOOK_PATH = URL_API + DOMAIN

SIGNAL_MAP = {
    TYPE_CONTROLLER_STATUS: SIGNAL_RACHIO_CONTROLLER_UPDATE,
    TYPE_SCHEDULE_STATUS: SIGNAL_RACHIO_SCHEDULE_UPDATE,
    TYPE_ZONE_STATUS: SIGNAL_RACHIO_ZONE_UPDATE,
}


_LOGGER = logging.getLogger(__name__)


class RachioWebhookView(HomeAssistantView):
    """Provide a page for the server to call."""

    requires_auth = False  # Handled separately

    def __init__(self, entry_id, webhook_url):
        """Initialize the instance of the view."""
        self._entry_id = entry_id
        self.url = webhook_url
        self.name = webhook_url[1:].replace("/", ":")
        _LOGGER.debug(
            "Initialize webhook at url: %s, with name %s", self.url, self.name
        )

    async def post(self, request) -> web.Response:
        """Handle webhook calls from the server."""
        hass = request.app["hass"]
        data = await request.json()

        try:
            auth = data.get(KEY_EXTERNAL_ID, str()).split(":")[1]
            assert auth == hass.data[DOMAIN][self._entry_id].rachio.webhook_auth
        except (AssertionError, IndexError):
            return web.Response(status=web.HTTPForbidden.status_code)

        update_type = data[KEY_TYPE]
        if update_type in SIGNAL_MAP:
            async_dispatcher_send(hass, SIGNAL_MAP[update_type], data)

        return web.Response(status=web.HTTPNoContent.status_code)
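For the view above to receive anything, it has to be registered with Home Assistant's HTTP component and the resulting URL handed to Rachio. A minimal sketch of that wiring, under the assumption that the module above is importable and that the Rachio API object exposes a webhook-creation call (the create_webhook name is a placeholder, not the library's confirmed API):

from homeassistant.components.rachio.webhooks import (
    LISTEN_EVENT_TYPES,
    WEBHOOK_PATH,
    RachioWebhookView,
)


async def register_rachio_webhook(hass, entry_id, rachio_api=None):
    """Expose the webhook endpoint and (optionally) tell Rachio about it."""
    # WEBHOOK_PATH is URL_API + DOMAIN, i.e. a path such as "/api/rachio".
    hass.http.register_view(RachioWebhookView(entry_id, WEBHOOK_PATH))
    if rachio_api is not None:
        # Placeholder call: hand the externally reachable URL to the Rachio
        # cloud so it knows where to POST DEVICE/ZONE status events.
        external_url = hass.config.api.base_url + WEBHOOK_PATH
        rachio_api.create_webhook(external_url, LISTEN_EVENT_TYPES)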
@@ -106,7 +106,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
        raise ConfigEntryNotReady

    sense_devices_data = SenseDevicesData()
    sense_discovered_devices = await gateway.get_discovered_device_data()
    try:
        sense_discovered_devices = await gateway.get_discovered_device_data()
    except SENSE_TIMEOUT_EXCEPTIONS:
        raise ConfigEntryNotReady

    hass.data[DOMAIN][entry.entry_id] = {
        SENSE_DATA: gateway,
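Wrapping the discovery call in try/except and raising ConfigEntryNotReady is the standard way to make Home Assistant retry setup later instead of failing the entry outright when a cloud API times out. A minimal sketch of the pattern, with a generic timeout tuple standing in for SENSE_TIMEOUT_EXCEPTIONS:

from homeassistant.exceptions import ConfigEntryNotReady

# Placeholder exception tuple; the real code uses SENSE_TIMEOUT_EXCEPTIONS.
TIMEOUT_EXCEPTIONS = (TimeoutError,)


async def fetch_or_retry_later(fetch):
    """Run a setup-time fetch, deferring setup on transient failures."""
    try:
        return await fetch()
    except TIMEOUT_EXCEPTIONS:
        # Home Assistant catches this and schedules the config entry
        # for another setup attempt later.
        raise ConfigEntryNotReady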
@@ -186,10 +186,11 @@ class TadoConnector:
        device_type="HEATING",
        mode=None,
        fan_speed=None,
        swing=None,
    ):
        """Set a zone overlay."""
        _LOGGER.debug(
            "Set overlay for zone %s: overlay_mode=%s, temp=%s, duration=%s, type=%s, mode=%s fan_speed=%s",
            "Set overlay for zone %s: overlay_mode=%s, temp=%s, duration=%s, type=%s, mode=%s fan_speed=%s swing=%s",
            zone_id,
            overlay_mode,
            temperature,
@@ -197,6 +198,7 @@ class TadoConnector:
            device_type,
            mode,
            fan_speed,
            swing,
        )

        try:
@@ -208,7 +210,8 @@ class TadoConnector:
                device_type,
                "ON",
                mode,
                fan_speed,
                fanSpeed=fan_speed,
                swing=swing,
            )

        except RequestException as exc:
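With the new swing parameter threaded through, callers can set or clear swing together with the overlay. A minimal sketch of such a call site, assuming a TadoConnector instance and illustrative literal values (zone id, temperatures and mode strings are placeholders, not taken from the diff):

def turn_on_cooling(tado):
    """Illustrative only: forward fan speed and swing with the overlay."""
    tado.set_zone_overlay(
        zone_id=1,
        overlay_mode="MANUAL",
        temperature=21.5,
        duration=None,
        device_type="AIR_CONDITIONING",
        mode="COOL",
        fan_speed="AUTO",
        swing="OFF",  # passed on to PyTado as swing=... alongside fanSpeed=...
    )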
|
@@ -11,6 +11,7 @@ from homeassistant.components.climate.const import (
|
||||
PRESET_HOME,
|
||||
SUPPORT_FAN_MODE,
|
||||
SUPPORT_PRESET_MODE,
|
||||
SUPPORT_SWING_MODE,
|
||||
SUPPORT_TARGET_TEMPERATURE,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, TEMP_CELSIUS
|
||||
@@ -35,6 +36,7 @@ from .const import (
|
||||
SUPPORT_PRESET,
|
||||
TADO_HVAC_ACTION_TO_HA_HVAC_ACTION,
|
||||
TADO_MODES_WITH_NO_TEMP_SETTING,
|
||||
TADO_SWING_OFF,
|
||||
TADO_TO_HA_FAN_MODE_MAP,
|
||||
TADO_TO_HA_HVAC_MODE_MAP,
|
||||
TYPE_AIR_CONDITIONING,
|
||||
@@ -85,6 +87,9 @@ def create_climate_entity(tado, name: str, zone_id: int):
|
||||
continue
|
||||
|
||||
supported_hvac_modes.append(TADO_TO_HA_HVAC_MODE_MAP[mode])
|
||||
if capabilities[mode].get("swings"):
|
||||
support_flags |= SUPPORT_SWING_MODE
|
||||
|
||||
if not capabilities[mode].get("fanSpeeds"):
|
||||
continue
|
||||
|
||||
@@ -197,6 +202,7 @@ class TadoClimate(ClimateDevice):
|
||||
self._current_tado_fan_speed = CONST_FAN_OFF
|
||||
self._current_tado_hvac_mode = CONST_MODE_OFF
|
||||
self._current_tado_hvac_action = CURRENT_HVAC_OFF
|
||||
self._current_tado_swing_mode = TADO_SWING_OFF
|
||||
|
||||
self._undo_dispatcher = None
|
||||
self._tado_zone_data = None
|
||||
@@ -378,6 +384,25 @@ class TadoClimate(ClimateDevice):
|
||||
|
||||
return self._heat_max_temp
|
||||
|
||||
@property
|
||||
def swing_mode(self):
|
||||
"""Active swing mode for the device."""
|
||||
return self._current_tado_swing_mode
|
||||
|
||||
@property
|
||||
def swing_modes(self):
|
||||
"""Swing modes for the device."""
|
||||
if self._support_flags & SUPPORT_SWING_MODE:
|
||||
# Currently we only support off.
|
||||
# On will be added in the future in an update
|
||||
# to PyTado
|
||||
return [TADO_SWING_OFF]
|
||||
return None
|
||||
|
||||
def set_swing_mode(self, swing_mode):
|
||||
"""Set swing modes for the device."""
|
||||
self._control_hvac(swing_mode=swing_mode)
|
||||
|
||||
@callback
|
||||
def _async_update_zone_data(self):
|
||||
"""Load tado data into zone."""
|
||||
@@ -408,7 +433,9 @@ class TadoClimate(ClimateDevice):
|
||||
elif self._target_temp < self._heat_min_temp:
|
||||
self._target_temp = self._heat_min_temp
|
||||
|
||||
def _control_hvac(self, hvac_mode=None, target_temp=None, fan_mode=None):
|
||||
def _control_hvac(
|
||||
self, hvac_mode=None, target_temp=None, fan_mode=None, swing_mode=None
|
||||
):
|
||||
"""Send new target temperature to Tado."""
|
||||
|
||||
if hvac_mode:
|
||||
@@ -420,6 +447,9 @@ class TadoClimate(ClimateDevice):
|
||||
if fan_mode:
|
||||
self._current_tado_fan_speed = fan_mode
|
||||
|
||||
if swing_mode:
|
||||
self._current_tado_swing_mode = swing_mode
|
||||
|
||||
self._normalize_target_temp_for_hvac_mode()
|
||||
|
||||
# tado does not permit setting the fan speed to
|
||||
@@ -464,6 +494,13 @@ class TadoClimate(ClimateDevice):
|
||||
# A temperature cannot be passed with these modes
|
||||
temperature_to_send = None
|
||||
|
||||
fan_speed = None
|
||||
if self._support_flags & SUPPORT_FAN_MODE:
|
||||
fan_speed = self._current_tado_fan_speed
|
||||
swing = None
|
||||
if self._support_flags & SUPPORT_SWING_MODE:
|
||||
swing = self._current_tado_swing_mode
|
||||
|
||||
self._tado.set_zone_overlay(
|
||||
zone_id=self.zone_id,
|
||||
overlay_mode=overlay_mode, # What to do when the period ends
|
||||
@@ -471,9 +508,6 @@ class TadoClimate(ClimateDevice):
|
||||
duration=None,
|
||||
device_type=self.zone_type,
|
||||
mode=self._current_tado_hvac_mode,
|
||||
fan_speed=(
|
||||
self._current_tado_fan_speed
|
||||
if (self._support_flags & SUPPORT_FAN_MODE)
|
||||
else None
|
||||
), # api defaults to not sending fanSpeed if not specified
|
||||
fan_speed=fan_speed, # api defaults to not sending fanSpeed if None specified
|
||||
swing=swing, # api defaults to not sending swing if None specified
|
||||
)
|
||||
|
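In the climate entity, swing support is advertised only when the zone's capabilities report swings, and the current swing mode is sent along only when that flag is set. A minimal sketch of that capability-gated logic, using placeholder bit constants with the same intent as SUPPORT_FAN_MODE / SUPPORT_SWING_MODE above:

SUPPORT_FAN = 1 << 0    # placeholder bit values for the sketch
SUPPORT_SWING = 1 << 1


def build_support_flags(capabilities: dict) -> int:
    """Derive support flags from a zone's reported capabilities."""
    flags = 0
    if capabilities.get("fanSpeeds"):
        flags |= SUPPORT_FAN
    if capabilities.get("swings"):
        flags |= SUPPORT_SWING
    return flags


def overlay_kwargs(flags: int, fan_speed: str, swing_mode: str) -> dict:
    # Only include what the zone actually supports; the API treats a
    # None value as "do not send this field".
    return {
        "fan_speed": fan_speed if flags & SUPPORT_FAN else None,
        "swing": swing_mode if flags & SUPPORT_SWING else None,
    }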
@@ -131,3 +131,7 @@ TADO_TO_HA_FAN_MODE_MAP = {value: key for key, value in HA_TO_TADO_FAN_MODE_MAP.
|
||||
DEFAULT_TADO_PRECISION = 0.1
|
||||
|
||||
SUPPORT_PRESET = [PRESET_AWAY, PRESET_HOME]
|
||||
|
||||
|
||||
TADO_SWING_OFF = "OFF"
|
||||
TADO_SWING_ON = "ON"
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"name": "Tado",
|
||||
"documentation": "https://www.home-assistant.io/integrations/tado",
|
||||
"requirements": [
|
||||
"python-tado==0.5.0"
|
||||
"python-tado==0.6.0"
|
||||
],
|
||||
"dependencies": [],
|
||||
"codeowners": [
|
||||
|
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"domain": "vizio",
|
||||
"name": "Vizio SmartCast",
|
||||
"name": "VIZIO SmartCast",
|
||||
"documentation": "https://www.home-assistant.io/integrations/vizio",
|
||||
"requirements": ["pyvizio==0.1.44"],
|
||||
"dependencies": [],
|
||||
|
@@ -250,7 +250,7 @@ apcaccess==0.0.13
|
||||
apns2==0.3.0
|
||||
|
||||
# homeassistant.components.apprise
|
||||
apprise==0.8.4
|
||||
apprise==0.8.5
|
||||
|
||||
# homeassistant.components.aprs
|
||||
aprslib==0.6.46
|
||||
@@ -447,7 +447,7 @@ deluge-client==1.7.1
|
||||
denonavr==0.8.1
|
||||
|
||||
# homeassistant.components.directv
|
||||
directpy==0.7
|
||||
directv==0.2.0
|
||||
|
||||
# homeassistant.components.discogs
|
||||
discogs_client==2.2.2
|
||||
@@ -1668,7 +1668,7 @@ python-songpal==0.11.2
|
||||
python-synology==0.4.0
|
||||
|
||||
# homeassistant.components.tado
|
||||
python-tado==0.5.0
|
||||
python-tado==0.6.0
|
||||
|
||||
# homeassistant.components.telegram_bot
|
||||
python-telegram-bot==11.1.0
|
||||
@@ -2011,7 +2011,7 @@ temperusb==1.5.3
|
||||
# tensorflow==1.13.2
|
||||
|
||||
# homeassistant.components.powerwall
|
||||
tesla-powerwall==0.1.1
|
||||
tesla-powerwall==0.1.3
|
||||
|
||||
# homeassistant.components.tesla
|
||||
teslajsonpy==0.6.0
|
||||
|
@@ -112,7 +112,7 @@ androidtv==0.0.39
|
||||
apns2==0.3.0
|
||||
|
||||
# homeassistant.components.apprise
|
||||
apprise==0.8.4
|
||||
apprise==0.8.5
|
||||
|
||||
# homeassistant.components.aprs
|
||||
aprslib==0.6.46
|
||||
@@ -178,7 +178,7 @@ defusedxml==0.6.0
|
||||
denonavr==0.8.1
|
||||
|
||||
# homeassistant.components.directv
|
||||
directpy==0.7
|
||||
directv==0.2.0
|
||||
|
||||
# homeassistant.components.updater
|
||||
distro==1.4.0
|
||||
@@ -620,7 +620,7 @@ python-miio==0.4.8
|
||||
python-nest==4.1.0
|
||||
|
||||
# homeassistant.components.tado
|
||||
python-tado==0.5.0
|
||||
python-tado==0.6.0
|
||||
|
||||
# homeassistant.components.twitch
|
||||
python-twitch-client==0.6.0
|
||||
@@ -729,7 +729,7 @@ sunwatcher==0.2.1
|
||||
tellduslive==0.10.10
|
||||
|
||||
# homeassistant.components.powerwall
|
||||
tesla-powerwall==0.1.1
|
||||
tesla-powerwall==0.1.3
|
||||
|
||||
# homeassistant.components.tesla
|
||||
teslajsonpy==0.6.0
|
||||
|
@@ -2,6 +2,7 @@
|
||||
import unittest
|
||||
|
||||
from homeassistant.components.bayesian import binary_sensor as bayesian
|
||||
from homeassistant.const import STATE_UNKNOWN
|
||||
from homeassistant.setup import setup_component
|
||||
|
||||
from tests.common import get_test_home_assistant
|
||||
@@ -18,6 +19,65 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
"""Stop everything that was started."""
|
||||
self.hass.stop()
|
||||
|
||||
def test_load_values_when_added_to_hass(self):
|
||||
"""Test that sensor initializes with observations of relevant entities."""
|
||||
|
||||
config = {
|
||||
"binary_sensor": {
|
||||
"name": "Test_Binary",
|
||||
"platform": "bayesian",
|
||||
"observations": [
|
||||
{
|
||||
"platform": "state",
|
||||
"entity_id": "sensor.test_monitored",
|
||||
"to_state": "off",
|
||||
"prob_given_true": 0.8,
|
||||
"prob_given_false": 0.4,
|
||||
}
|
||||
],
|
||||
"prior": 0.2,
|
||||
"probability_threshold": 0.32,
|
||||
}
|
||||
}
|
||||
|
||||
self.hass.states.set("sensor.test_monitored", "off")
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert setup_component(self.hass, "binary_sensor", config)
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8
|
||||
assert state.attributes.get("observations")[0]["prob_given_false"] == 0.4
|
||||
|
||||
def test_unknown_state_does_not_influence_probability(self):
|
||||
"""Test that an unknown state does not change the output probability."""
|
||||
|
||||
config = {
|
||||
"binary_sensor": {
|
||||
"name": "Test_Binary",
|
||||
"platform": "bayesian",
|
||||
"observations": [
|
||||
{
|
||||
"platform": "state",
|
||||
"entity_id": "sensor.test_monitored",
|
||||
"to_state": "off",
|
||||
"prob_given_true": 0.8,
|
||||
"prob_given_false": 0.4,
|
||||
}
|
||||
],
|
||||
"prior": 0.2,
|
||||
"probability_threshold": 0.32,
|
||||
}
|
||||
}
|
||||
|
||||
self.hass.states.set("sensor.test_monitored", STATE_UNKNOWN)
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert setup_component(self.hass, "binary_sensor", config)
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
assert state.attributes.get("observations") == [None]
|
||||
|
||||
def test_sensor_numeric_state(self):
|
||||
"""Test sensor on numeric state platform observations."""
|
||||
config = {
|
||||
@@ -52,7 +112,7 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
|
||||
assert [] == state.attributes.get("observations")
|
||||
assert [None, None] == state.attributes.get("observations")
|
||||
assert 0.2 == state.attributes.get("probability")
|
||||
|
||||
assert state.state == "off"
|
||||
@@ -66,10 +126,9 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
self.hass.block_till_done()
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
assert [
|
||||
{"prob_false": 0.4, "prob_true": 0.6},
|
||||
{"prob_false": 0.1, "prob_true": 0.9},
|
||||
] == state.attributes.get("observations")
|
||||
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.6
|
||||
assert state.attributes.get("observations")[1]["prob_given_true"] == 0.9
|
||||
assert state.attributes.get("observations")[1]["prob_given_false"] == 0.1
|
||||
assert round(abs(0.77 - state.attributes.get("probability")), 7) == 0
|
||||
|
||||
assert state.state == "on"
|
||||
@@ -118,7 +177,7 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
|
||||
assert [] == state.attributes.get("observations")
|
||||
assert [None] == state.attributes.get("observations")
|
||||
assert 0.2 == state.attributes.get("probability")
|
||||
|
||||
assert state.state == "off"
|
||||
@@ -131,9 +190,62 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
self.hass.block_till_done()
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
assert [{"prob_true": 0.8, "prob_false": 0.4}] == state.attributes.get(
|
||||
"observations"
|
||||
)
|
||||
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8
|
||||
assert state.attributes.get("observations")[0]["prob_given_false"] == 0.4
|
||||
assert round(abs(0.33 - state.attributes.get("probability")), 7) == 0
|
||||
|
||||
assert state.state == "on"
|
||||
|
||||
self.hass.states.set("sensor.test_monitored", "off")
|
||||
self.hass.block_till_done()
|
||||
self.hass.states.set("sensor.test_monitored", "on")
|
||||
self.hass.block_till_done()
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
assert round(abs(0.2 - state.attributes.get("probability")), 7) == 0
|
||||
|
||||
assert state.state == "off"
|
||||
|
||||
def test_sensor_value_template(self):
|
||||
"""Test sensor on template platform observations."""
|
||||
config = {
|
||||
"binary_sensor": {
|
||||
"name": "Test_Binary",
|
||||
"platform": "bayesian",
|
||||
"observations": [
|
||||
{
|
||||
"platform": "template",
|
||||
"value_template": "{{states('sensor.test_monitored') == 'off'}}",
|
||||
"prob_given_true": 0.8,
|
||||
"prob_given_false": 0.4,
|
||||
}
|
||||
],
|
||||
"prior": 0.2,
|
||||
"probability_threshold": 0.32,
|
||||
}
|
||||
}
|
||||
|
||||
assert setup_component(self.hass, "binary_sensor", config)
|
||||
|
||||
self.hass.states.set("sensor.test_monitored", "on")
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
|
||||
assert [None] == state.attributes.get("observations")
|
||||
assert 0.2 == state.attributes.get("probability")
|
||||
|
||||
assert state.state == "off"
|
||||
|
||||
self.hass.states.set("sensor.test_monitored", "off")
|
||||
self.hass.block_till_done()
|
||||
self.hass.states.set("sensor.test_monitored", "on")
|
||||
self.hass.block_till_done()
|
||||
self.hass.states.set("sensor.test_monitored", "off")
|
||||
self.hass.block_till_done()
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8
|
||||
assert state.attributes.get("observations")[0]["prob_given_false"] == 0.4
|
||||
assert round(abs(0.33 - state.attributes.get("probability")), 7) == 0
|
||||
|
||||
assert state.state == "on"
|
||||
@@ -210,7 +322,7 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
|
||||
assert [] == state.attributes.get("observations")
|
||||
assert [None, None] == state.attributes.get("observations")
|
||||
assert 0.2 == state.attributes.get("probability")
|
||||
|
||||
assert state.state == "off"
|
||||
@@ -223,9 +335,9 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
self.hass.block_till_done()
|
||||
|
||||
state = self.hass.states.get("binary_sensor.test_binary")
|
||||
assert [{"prob_true": 0.8, "prob_false": 0.4}] == state.attributes.get(
|
||||
"observations"
|
||||
)
|
||||
|
||||
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8
|
||||
assert state.attributes.get("observations")[0]["prob_given_false"] == 0.4
|
||||
assert round(abs(0.33 - state.attributes.get("probability")), 7) == 0
|
||||
|
||||
assert state.state == "on"
|
||||
@@ -242,20 +354,20 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
|
||||
def test_probability_updates(self):
|
||||
"""Test probability update function."""
|
||||
prob_true = [0.3, 0.6, 0.8]
|
||||
prob_false = [0.7, 0.4, 0.2]
|
||||
prob_given_true = [0.3, 0.6, 0.8]
|
||||
prob_given_false = [0.7, 0.4, 0.2]
|
||||
prior = 0.5
|
||||
|
||||
for pt, pf in zip(prob_true, prob_false):
|
||||
for pt, pf in zip(prob_given_true, prob_given_false):
|
||||
prior = bayesian.update_probability(prior, pt, pf)
|
||||
|
||||
assert round(abs(0.720000 - prior), 7) == 0
|
||||
|
||||
prob_true = [0.8, 0.3, 0.9]
|
||||
prob_false = [0.6, 0.4, 0.2]
|
||||
prob_given_true = [0.8, 0.3, 0.9]
|
||||
prob_given_false = [0.6, 0.4, 0.2]
|
||||
prior = 0.7
|
||||
|
||||
for pt, pf in zip(prob_true, prob_false):
|
||||
for pt, pf in zip(prob_given_true, prob_given_false):
|
||||
prior = bayesian.update_probability(prior, pt, pf)
|
||||
|
||||
assert round(abs(0.9130434782608695 - prior), 7) == 0
|
||||
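The expected values in this test can be reproduced by chaining Bayes' rule by hand: each observation's posterior becomes the next prior. A small self-check sketch for the first case (standalone, not part of the test file):

# Prior 0.5 should become 0.72 after the three observations above.
prior = 0.5
for p_t, p_f in zip([0.3, 0.6, 0.8], [0.7, 0.4, 0.2]):
    numerator = p_t * prior
    prior = numerator / (numerator + p_f * (1 - prior))
assert round(abs(0.72 - prior), 7) == 0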
@@ -271,7 +383,7 @@ class TestBayesianBinarySensor(unittest.TestCase):
|
||||
"platform": "state",
|
||||
"entity_id": "sensor.test_monitored",
|
||||
"to_state": "off",
|
||||
"prob_given_true": 0.8,
|
||||
"prob_given_true": 0.9,
|
||||
"prob_given_false": 0.4,
|
||||
},
|
||||
{
|
||||
|
@@ -1,183 +1,94 @@
|
||||
"""Tests for the DirecTV component."""
|
||||
from DirectPy import DIRECTV
|
||||
|
||||
from homeassistant.components.directv.const import DOMAIN
|
||||
from homeassistant.components.directv.const import CONF_RECEIVER_ID, DOMAIN
|
||||
from homeassistant.components.ssdp import ATTR_SSDP_LOCATION
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.common import MockConfigEntry, load_fixture
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
|
||||
CLIENT_NAME = "Bedroom Client"
|
||||
CLIENT_ADDRESS = "2CA17D1CD30X"
|
||||
DEFAULT_DEVICE = "0"
|
||||
HOST = "127.0.0.1"
|
||||
MAIN_NAME = "Main DVR"
|
||||
RECEIVER_ID = "028877455858"
|
||||
SSDP_LOCATION = "http://127.0.0.1/"
|
||||
UPNP_SERIAL = "RID-028877455858"
|
||||
|
||||
LIVE = {
|
||||
"callsign": "HASSTV",
|
||||
"date": "20181110",
|
||||
"duration": 3600,
|
||||
"isOffAir": False,
|
||||
"isPclocked": 1,
|
||||
"isPpv": False,
|
||||
"isRecording": False,
|
||||
"isVod": False,
|
||||
"major": 202,
|
||||
"minor": 65535,
|
||||
"offset": 1,
|
||||
"programId": "102454523",
|
||||
"rating": "No Rating",
|
||||
"startTime": 1541876400,
|
||||
"stationId": 3900947,
|
||||
"title": "Using Home Assistant to automate your home",
|
||||
}
|
||||
|
||||
RECORDING = {
|
||||
"callsign": "HASSTV",
|
||||
"date": "20181110",
|
||||
"duration": 3600,
|
||||
"isOffAir": False,
|
||||
"isPclocked": 1,
|
||||
"isPpv": False,
|
||||
"isRecording": True,
|
||||
"isVod": False,
|
||||
"major": 202,
|
||||
"minor": 65535,
|
||||
"offset": 1,
|
||||
"programId": "102454523",
|
||||
"rating": "No Rating",
|
||||
"startTime": 1541876400,
|
||||
"stationId": 3900947,
|
||||
"title": "Using Home Assistant to automate your home",
|
||||
"uniqueId": "12345",
|
||||
"episodeTitle": "Configure DirecTV platform.",
|
||||
}
|
||||
|
||||
MOCK_CONFIG = {DOMAIN: [{CONF_HOST: HOST}]}
|
||||
|
||||
MOCK_GET_LOCATIONS = {
|
||||
"locations": [{"locationName": MAIN_NAME, "clientAddr": DEFAULT_DEVICE}],
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK.",
|
||||
"query": "/info/getLocations",
|
||||
},
|
||||
}
|
||||
|
||||
MOCK_GET_LOCATIONS_MULTIPLE = {
|
||||
"locations": [
|
||||
{"locationName": MAIN_NAME, "clientAddr": DEFAULT_DEVICE},
|
||||
{"locationName": CLIENT_NAME, "clientAddr": CLIENT_ADDRESS},
|
||||
],
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK.",
|
||||
"query": "/info/getLocations",
|
||||
},
|
||||
}
|
||||
|
||||
MOCK_GET_VERSION = {
|
||||
"accessCardId": "0021-1495-6572",
|
||||
"receiverId": "0288 7745 5858",
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK.",
|
||||
"query": "/info/getVersion",
|
||||
},
|
||||
"stbSoftwareVersion": "0x4ed7",
|
||||
"systemTime": 1281625203,
|
||||
"version": "1.2",
|
||||
}
|
||||
MOCK_SSDP_DISCOVERY_INFO = {ATTR_SSDP_LOCATION: SSDP_LOCATION}
|
||||
MOCK_USER_INPUT = {CONF_HOST: HOST}
|
||||
|
||||
|
||||
class MockDirectvClass(DIRECTV):
|
||||
"""A fake DirecTV DVR device."""
|
||||
def mock_connection(aioclient_mock: AiohttpClientMocker) -> None:
|
||||
"""Mock the DirecTV connection for Home Assistant."""
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/info/getVersion",
|
||||
text=load_fixture("directv/info-get-version.json"),
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
def __init__(self, ip, port=8080, clientAddr="0", determine_state=False):
|
||||
"""Initialize the fake DirecTV device."""
|
||||
super().__init__(
|
||||
ip=ip, port=port, clientAddr=clientAddr, determine_state=determine_state,
|
||||
)
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/info/getLocations",
|
||||
text=load_fixture("directv/info-get-locations.json"),
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
self._play = False
|
||||
self._standby = True
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/info/mode",
|
||||
params={"clientAddr": "9XXXXXXXXXX9"},
|
||||
status=500,
|
||||
text=load_fixture("directv/info-mode-error.json"),
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
if self.clientAddr == CLIENT_ADDRESS:
|
||||
self.attributes = RECORDING
|
||||
self._standby = False
|
||||
else:
|
||||
self.attributes = LIVE
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/info/mode",
|
||||
text=load_fixture("directv/info-mode.json"),
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
def get_locations(self):
|
||||
"""Mock for get_locations method."""
|
||||
return MOCK_GET_LOCATIONS
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/remote/processKey",
|
||||
text=load_fixture("directv/remote-process-key.json"),
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
def get_serial_num(self):
|
||||
"""Mock for get_serial_num method."""
|
||||
test_serial_num = {
|
||||
"serialNum": "9999999999",
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK.",
|
||||
"query": "/info/getSerialNum",
|
||||
},
|
||||
}
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/tv/tune",
|
||||
text=load_fixture("directv/tv-tune.json"),
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
return test_serial_num
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/tv/getTuned",
|
||||
params={"clientAddr": "2CA17D1CD30X"},
|
||||
text=load_fixture("directv/tv-get-tuned.json"),
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
def get_standby(self):
|
||||
"""Mock for get_standby method."""
|
||||
return self._standby
|
||||
|
||||
def get_tuned(self):
|
||||
"""Mock for get_tuned method."""
|
||||
if self._play:
|
||||
self.attributes["offset"] = self.attributes["offset"] + 1
|
||||
|
||||
test_attributes = self.attributes
|
||||
test_attributes["status"] = {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK.",
|
||||
"query": "/tv/getTuned",
|
||||
}
|
||||
return test_attributes
|
||||
|
||||
def get_version(self):
|
||||
"""Mock for get_version method."""
|
||||
return MOCK_GET_VERSION
|
||||
|
||||
def key_press(self, keypress):
|
||||
"""Mock for key_press method."""
|
||||
if keypress == "poweron":
|
||||
self._standby = False
|
||||
self._play = True
|
||||
elif keypress == "poweroff":
|
||||
self._standby = True
|
||||
self._play = False
|
||||
elif keypress == "play":
|
||||
self._play = True
|
||||
elif keypress == "pause" or keypress == "stop":
|
||||
self._play = False
|
||||
|
||||
def tune_channel(self, source):
|
||||
"""Mock for tune_channel method."""
|
||||
self.attributes["major"] = int(source)
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/tv/getTuned",
|
||||
text=load_fixture("directv/tv-get-tuned-movie.json"),
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
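The tests now stub the receiver at the HTTP layer instead of patching a fake DIRECTV class: each endpoint gets a canned JSON fixture served through the aiohttp mocker. A minimal sketch of one such stub, following the mock_connection pattern above (host and fixture name are illustrative):

from tests.common import load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker


def mock_get_version(aioclient_mock: AiohttpClientMocker, host: str) -> None:
    """Serve a canned JSON body for the receiver's getVersion endpoint."""
    aioclient_mock.get(
        f"http://{host}:8080/info/getVersion",
        text=load_fixture("directv/info-get-version.json"),
        headers={"Content-Type": "application/json"},
    )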
|
||||
async def setup_integration(
|
||||
hass: HomeAssistantType, skip_entry_setup: bool = False
|
||||
hass: HomeAssistantType,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
skip_entry_setup: bool = False,
|
||||
setup_error: bool = False,
|
||||
) -> MockConfigEntry:
|
||||
"""Set up the DirecTV integration in Home Assistant."""
|
||||
if setup_error:
|
||||
aioclient_mock.get(
|
||||
f"http://{HOST}:8080/info/getVersion", status=500,
|
||||
)
|
||||
else:
|
||||
mock_connection(aioclient_mock)
|
||||
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN, unique_id=RECEIVER_ID, data={CONF_HOST: HOST}
|
||||
domain=DOMAIN,
|
||||
unique_id=RECEIVER_ID,
|
||||
data={CONF_HOST: HOST, CONF_RECEIVER_ID: RECEIVER_ID},
|
||||
)
|
||||
|
||||
entry.add_to_hass(hass)
|
||||
|
@@ -1,11 +1,9 @@
|
||||
"""Test the DirecTV config flow."""
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from aiohttp import ClientError as HTTPClientError
|
||||
from asynctest import patch
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
from homeassistant.components.directv.const import DOMAIN
|
||||
from homeassistant.components.ssdp import ATTR_SSDP_LOCATION, ATTR_UPNP_SERIAL
|
||||
from homeassistant.components.directv.const import CONF_RECEIVER_ID, DOMAIN
|
||||
from homeassistant.components.ssdp import ATTR_UPNP_SERIAL
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER
|
||||
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE
|
||||
from homeassistant.data_entry_flow import (
|
||||
@@ -14,219 +12,259 @@ from homeassistant.data_entry_flow import (
|
||||
RESULT_TYPE_FORM,
|
||||
)
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.components.directv import (
|
||||
HOST,
|
||||
MOCK_SSDP_DISCOVERY_INFO,
|
||||
MOCK_USER_INPUT,
|
||||
RECEIVER_ID,
|
||||
SSDP_LOCATION,
|
||||
UPNP_SERIAL,
|
||||
MockDirectvClass,
|
||||
mock_connection,
|
||||
setup_integration,
|
||||
)
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
|
||||
|
||||
async def async_configure_flow(
|
||||
hass: HomeAssistantType, flow_id: str, user_input: Optional[Dict] = None
|
||||
) -> Any:
|
||||
"""Set up mock DirecTV integration flow."""
|
||||
with patch(
|
||||
"homeassistant.components.directv.config_flow.DIRECTV", new=MockDirectvClass,
|
||||
):
|
||||
return await hass.config_entries.flow.async_configure(
|
||||
flow_id=flow_id, user_input=user_input
|
||||
)
|
||||
|
||||
|
||||
async def async_init_flow(
|
||||
hass: HomeAssistantType,
|
||||
handler: str = DOMAIN,
|
||||
context: Optional[Dict] = None,
|
||||
data: Any = None,
|
||||
) -> Any:
|
||||
"""Set up mock DirecTV integration flow."""
|
||||
with patch(
|
||||
"homeassistant.components.directv.config_flow.DIRECTV", new=MockDirectvClass,
|
||||
):
|
||||
return await hass.config_entries.flow.async_init(
|
||||
handler=handler, context=context, data=data
|
||||
)
|
||||
|
||||
|
||||
async def test_duplicate_error(hass: HomeAssistantType) -> None:
|
||||
"""Test that errors are shown when duplicates are added."""
|
||||
MockConfigEntry(
|
||||
domain=DOMAIN, unique_id=RECEIVER_ID, data={CONF_HOST: HOST}
|
||||
).add_to_hass(hass)
|
||||
|
||||
result = await async_init_flow(
|
||||
hass, context={CONF_SOURCE: SOURCE_IMPORT}, data={CONF_HOST: HOST}
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
result = await async_init_flow(
|
||||
hass, context={CONF_SOURCE: SOURCE_USER}, data={CONF_HOST: HOST}
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
result = await async_init_flow(
|
||||
hass,
|
||||
context={CONF_SOURCE: SOURCE_SSDP},
|
||||
data={ATTR_SSDP_LOCATION: SSDP_LOCATION, ATTR_UPNP_SERIAL: UPNP_SERIAL},
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_form(hass: HomeAssistantType) -> None:
|
||||
"""Test we get the form."""
|
||||
await async_setup_component(hass, "persistent_notification", {})
|
||||
async def test_show_user_form(hass: HomeAssistantType) -> None:
|
||||
"""Test that the user set up form is served."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_USER}
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_USER},
|
||||
)
|
||||
|
||||
assert result["step_id"] == "user"
|
||||
assert result["type"] == RESULT_TYPE_FORM
|
||||
assert result["errors"] == {}
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.directv.async_setup", return_value=True
|
||||
) as mock_setup, patch(
|
||||
"homeassistant.components.directv.async_setup_entry", return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await async_configure_flow(hass, result["flow_id"], {CONF_HOST: HOST})
|
||||
|
||||
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
|
||||
assert result["title"] == HOST
|
||||
assert result["data"] == {CONF_HOST: HOST}
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_form_cannot_connect(hass: HomeAssistantType) -> None:
|
||||
"""Test we handle cannot connect error."""
|
||||
async def test_show_ssdp_form(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test that the ssdp confirmation form is served."""
|
||||
mock_connection(aioclient_mock)
|
||||
|
||||
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_USER}
|
||||
)
|
||||
|
||||
with patch(
|
||||
"tests.components.directv.test_config_flow.MockDirectvClass.get_version",
|
||||
side_effect=RequestException,
|
||||
) as mock_validate_input:
|
||||
result = await async_configure_flow(hass, result["flow_id"], {CONF_HOST: HOST},)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_FORM
|
||||
assert result["errors"] == {"base": "cannot_connect"}
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_validate_input.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_form_unknown_error(hass: HomeAssistantType) -> None:
|
||||
"""Test we handle unknown error."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_USER}
|
||||
)
|
||||
|
||||
with patch(
|
||||
"tests.components.directv.test_config_flow.MockDirectvClass.get_version",
|
||||
side_effect=Exception,
|
||||
) as mock_validate_input:
|
||||
result = await async_configure_flow(hass, result["flow_id"], {CONF_HOST: HOST},)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "unknown"
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_validate_input.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_import(hass: HomeAssistantType) -> None:
|
||||
"""Test the import step."""
|
||||
with patch(
|
||||
"homeassistant.components.directv.async_setup", return_value=True
|
||||
) as mock_setup, patch(
|
||||
"homeassistant.components.directv.async_setup_entry", return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await async_init_flow(
|
||||
hass, context={CONF_SOURCE: SOURCE_IMPORT}, data={CONF_HOST: HOST},
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
|
||||
assert result["title"] == HOST
|
||||
assert result["data"] == {CONF_HOST: HOST}
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_ssdp_discovery(hass: HomeAssistantType) -> None:
|
||||
"""Test the ssdp discovery step."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={CONF_SOURCE: SOURCE_SSDP},
|
||||
data={ATTR_SSDP_LOCATION: SSDP_LOCATION, ATTR_UPNP_SERIAL: UPNP_SERIAL},
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_FORM
|
||||
assert result["step_id"] == "ssdp_confirm"
|
||||
assert result["description_placeholders"] == {CONF_NAME: HOST}
|
||||
|
||||
|
||||
async def test_cannot_connect(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we show user form on connection error."""
|
||||
aioclient_mock.get("http://127.0.0.1:8080/info/getVersion", exc=HTTPClientError)
|
||||
|
||||
user_input = MOCK_USER_INPUT.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_USER}, data=user_input,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_FORM
|
||||
assert result["step_id"] == "user"
|
||||
assert result["errors"] == {"base": "cannot_connect"}
|
||||
|
||||
|
||||
async def test_ssdp_cannot_connect(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort SSDP flow on connection error."""
|
||||
aioclient_mock.get("http://127.0.0.1:8080/info/getVersion", exc=HTTPClientError)
|
||||
|
||||
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "cannot_connect"
|
||||
|
||||
|
||||
async def test_ssdp_confirm_cannot_connect(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort SSDP flow on connection error."""
|
||||
aioclient_mock.get("http://127.0.0.1:8080/info/getVersion", exc=HTTPClientError)
|
||||
|
||||
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={CONF_SOURCE: SOURCE_SSDP, CONF_HOST: HOST, CONF_NAME: HOST},
|
||||
data=discovery_info,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "cannot_connect"
|
||||
|
||||
|
||||
async def test_user_device_exists_abort(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort user flow if DirecTV receiver already configured."""
|
||||
await setup_integration(hass, aioclient_mock)
|
||||
|
||||
user_input = MOCK_USER_INPUT.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_USER}, data=user_input,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_ssdp_device_exists_abort(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort SSDP flow if DirecTV receiver already configured."""
|
||||
await setup_integration(hass, aioclient_mock)
|
||||
|
||||
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_ssdp_with_receiver_id_device_exists_abort(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort SSDP flow if DirecTV receiver already configured."""
|
||||
await setup_integration(hass, aioclient_mock)
|
||||
|
||||
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
|
||||
discovery_info[ATTR_UPNP_SERIAL] = UPNP_SERIAL
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_unknown_error(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we show user form on unknown error."""
|
||||
user_input = MOCK_USER_INPUT.copy()
|
||||
with patch(
|
||||
"homeassistant.components.directv.async_setup", return_value=True
|
||||
) as mock_setup, patch(
|
||||
"homeassistant.components.directv.async_setup_entry", return_value=True,
|
||||
) as mock_setup_entry:
|
||||
result = await async_configure_flow(hass, result["flow_id"], {})
|
||||
"homeassistant.components.directv.config_flow.DIRECTV.update",
|
||||
side_effect=Exception,
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_USER}, data=user_input,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "unknown"
|
||||
|
||||
|
||||
async def test_ssdp_unknown_error(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort SSDP flow on unknown error."""
|
||||
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
|
||||
with patch(
|
||||
"homeassistant.components.directv.config_flow.DIRECTV.update",
|
||||
side_effect=Exception,
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "unknown"
|
||||
|
||||
|
||||
async def test_ssdp_confirm_unknown_error(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test we abort SSDP flow on unknown error."""
|
||||
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
|
||||
with patch(
|
||||
"homeassistant.components.directv.config_flow.DIRECTV.update",
|
||||
side_effect=Exception,
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={CONF_SOURCE: SOURCE_SSDP, CONF_HOST: HOST, CONF_NAME: HOST},
|
||||
data=discovery_info,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["reason"] == "unknown"
|
||||
|
||||
|
||||
async def test_full_import_flow_implementation(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test the full manual user flow from start to finish."""
|
||||
mock_connection(aioclient_mock)
|
||||
|
||||
user_input = MOCK_USER_INPUT.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_IMPORT}, data=user_input,
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
|
||||
assert result["title"] == HOST
|
||||
assert result["data"] == {CONF_HOST: HOST}
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_setup.mock_calls) == 1
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
assert result["data"]
|
||||
assert result["data"][CONF_HOST] == HOST
|
||||
assert result["data"][CONF_RECEIVER_ID] == RECEIVER_ID
|
||||
|
||||
|
||||
async def test_ssdp_discovery_confirm_abort(hass: HomeAssistantType) -> None:
|
||||
"""Test we handle SSDP confirm cannot connect error."""
|
||||
async def test_full_user_flow_implementation(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test the full manual user flow from start to finish."""
|
||||
mock_connection(aioclient_mock)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={CONF_SOURCE: SOURCE_SSDP},
|
||||
data={ATTR_SSDP_LOCATION: SSDP_LOCATION, ATTR_UPNP_SERIAL: UPNP_SERIAL},
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_USER},
|
||||
)
|
||||
|
||||
with patch(
|
||||
"tests.components.directv.test_config_flow.MockDirectvClass.get_version",
|
||||
side_effect=RequestException,
|
||||
) as mock_validate_input:
|
||||
result = await async_configure_flow(hass, result["flow_id"], {})
|
||||
assert result["type"] == RESULT_TYPE_FORM
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_validate_input.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_ssdp_discovery_confirm_unknown_error(hass: HomeAssistantType) -> None:
|
||||
"""Test we handle SSDP confirm unknown error."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={CONF_SOURCE: SOURCE_SSDP},
|
||||
data={ATTR_SSDP_LOCATION: SSDP_LOCATION, ATTR_UPNP_SERIAL: UPNP_SERIAL},
|
||||
user_input = MOCK_USER_INPUT.copy()
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input=user_input,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"tests.components.directv.test_config_flow.MockDirectvClass.get_version",
|
||||
side_effect=Exception,
|
||||
) as mock_validate_input:
|
||||
result = await async_configure_flow(hass, result["flow_id"], {})
|
||||
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
|
||||
assert result["title"] == HOST
|
||||
|
||||
assert result["type"] == RESULT_TYPE_ABORT
|
||||
assert result["data"]
|
||||
assert result["data"][CONF_HOST] == HOST
|
||||
assert result["data"][CONF_RECEIVER_ID] == RECEIVER_ID
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_validate_input.mock_calls) == 1
|
||||
|
||||
async def test_full_ssdp_flow_implementation(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test the full SSDP flow from start to finish."""
|
||||
mock_connection(aioclient_mock)
|
||||
|
||||
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_FORM
|
||||
assert result["step_id"] == "ssdp_confirm"
|
||||
assert result["description_placeholders"] == {CONF_NAME: HOST}
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={}
|
||||
)
|
||||
|
||||
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
|
||||
assert result["title"] == HOST
|
||||
|
||||
assert result["data"]
|
||||
assert result["data"][CONF_HOST] == HOST
|
||||
assert result["data"][CONF_RECEIVER_ID] == RECEIVER_ID
|
||||
|
@@ -1,7 +1,4 @@
|
||||
"""Tests for the Roku integration."""
|
||||
from asynctest import patch
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
"""Tests for the DirecTV integration."""
|
||||
from homeassistant.components.directv.const import DOMAIN
|
||||
from homeassistant.config_entries import (
|
||||
ENTRY_STATE_LOADED,
|
||||
@@ -9,34 +6,36 @@ from homeassistant.config_entries import (
|
||||
ENTRY_STATE_SETUP_RETRY,
|
||||
)
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from tests.components.directv import MockDirectvClass, setup_integration
|
||||
from tests.components.directv import MOCK_CONFIG, mock_connection, setup_integration
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
|
||||
# pylint: disable=redefined-outer-name
|
||||
|
||||
|
||||
async def test_config_entry_not_ready(hass: HomeAssistantType) -> None:
|
||||
async def test_setup(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test the DirecTV setup from configuration."""
|
||||
mock_connection(aioclient_mock)
|
||||
assert await async_setup_component(hass, DOMAIN, MOCK_CONFIG)
|
||||
|
||||
|
||||
async def test_config_entry_not_ready(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test the DirecTV configuration entry not ready."""
|
||||
with patch(
|
||||
"homeassistant.components.directv.DIRECTV", new=MockDirectvClass,
|
||||
), patch(
|
||||
"homeassistant.components.directv.DIRECTV.get_locations",
|
||||
side_effect=RequestException,
|
||||
):
|
||||
entry = await setup_integration(hass)
|
||||
entry = await setup_integration(hass, aioclient_mock, setup_error=True)
|
||||
|
||||
assert entry.state == ENTRY_STATE_SETUP_RETRY
|
||||
|
||||
|
||||
async def test_unload_config_entry(hass: HomeAssistantType) -> None:
|
||||
async def test_unload_config_entry(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test the DirecTV configuration entry unloading."""
|
||||
with patch(
|
||||
"homeassistant.components.directv.DIRECTV", new=MockDirectvClass,
|
||||
), patch(
|
||||
"homeassistant.components.directv.media_player.async_setup_entry",
|
||||
return_value=True,
|
||||
):
|
||||
entry = await setup_integration(hass)
|
||||
entry = await setup_integration(hass, aioclient_mock)
|
||||
|
||||
assert entry.entry_id in hass.data[DOMAIN]
|
||||
assert entry.state == ENTRY_STATE_LOADED
|
||||
|
@@ -4,7 +4,6 @@ from typing import Optional
|
||||
|
||||
from asynctest import patch
|
||||
from pytest import fixture
|
||||
from requests import RequestException
|
||||
|
||||
from homeassistant.components.directv.media_player import (
|
||||
ATTR_MEDIA_CURRENTLY_RECORDING,
|
||||
@@ -24,6 +23,7 @@ from homeassistant.components.media_player.const import (
|
||||
ATTR_MEDIA_SERIES_TITLE,
|
||||
ATTR_MEDIA_TITLE,
|
||||
DOMAIN as MP_DOMAIN,
|
||||
MEDIA_TYPE_MOVIE,
|
||||
MEDIA_TYPE_TVSHOW,
|
||||
SERVICE_PLAY_MEDIA,
|
||||
SUPPORT_NEXT_TRACK,
|
||||
@@ -44,7 +44,6 @@ from homeassistant.const import (
|
||||
SERVICE_MEDIA_STOP,
|
||||
SERVICE_TURN_OFF,
|
||||
SERVICE_TURN_ON,
|
||||
STATE_OFF,
|
||||
STATE_PAUSED,
|
||||
STATE_PLAYING,
|
||||
STATE_UNAVAILABLE,
|
||||
@@ -52,18 +51,13 @@ from homeassistant.const import (
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
from tests.components.directv import (
|
||||
DOMAIN,
|
||||
MOCK_GET_LOCATIONS_MULTIPLE,
|
||||
RECORDING,
|
||||
MockDirectvClass,
|
||||
setup_integration,
|
||||
)
|
||||
from tests.components.directv import setup_integration
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
|
||||
ATTR_UNIQUE_ID = "unique_id"
|
||||
CLIENT_ENTITY_ID = f"{MP_DOMAIN}.bedroom_client"
|
||||
MAIN_ENTITY_ID = f"{MP_DOMAIN}.main_dvr"
|
||||
CLIENT_ENTITY_ID = f"{MP_DOMAIN}.client"
|
||||
MAIN_ENTITY_ID = f"{MP_DOMAIN}.host"
|
||||
UNAVAILABLE_ENTITY_ID = f"{MP_DOMAIN}.unavailable_client"
|
||||
|
||||
# pylint: disable=redefined-outer-name
|
||||
|
||||
@@ -74,29 +68,6 @@ def mock_now() -> datetime:
|
||||
return dt_util.utcnow()
|
||||
|
||||
|
||||
async def setup_directv(hass: HomeAssistantType) -> MockConfigEntry:
|
||||
"""Set up mock DirecTV integration."""
|
||||
with patch(
|
||||
"homeassistant.components.directv.DIRECTV", new=MockDirectvClass,
|
||||
):
|
||||
return await setup_integration(hass)
|
||||
|
||||
|
||||
async def setup_directv_with_locations(hass: HomeAssistantType) -> MockConfigEntry:
|
||||
"""Set up mock DirecTV integration."""
|
||||
with patch(
|
||||
"tests.components.directv.test_media_player.MockDirectvClass.get_locations",
|
||||
return_value=MOCK_GET_LOCATIONS_MULTIPLE,
|
||||
):
|
||||
with patch(
|
||||
"homeassistant.components.directv.DIRECTV", new=MockDirectvClass,
|
||||
), patch(
|
||||
"homeassistant.components.directv.media_player.DIRECTV",
|
||||
new=MockDirectvClass,
|
||||
):
|
||||
return await setup_integration(hass)
|
||||
|
||||
|
||||
async def async_turn_on(
|
||||
hass: HomeAssistantType, entity_id: Optional[str] = None
|
||||
) -> None:
|
||||
@@ -172,23 +143,21 @@ async def async_play_media(
|
||||
await hass.services.async_call(MP_DOMAIN, SERVICE_PLAY_MEDIA, data)
|
||||
|
||||
|
||||
async def test_setup(hass: HomeAssistantType) -> None:
|
||||
async def test_setup(
|
||||
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
|
||||
) -> None:
|
||||
"""Test setup with basic config."""
|
||||
await setup_directv(hass)
|
||||
    assert hass.states.get(MAIN_ENTITY_ID)


async def test_setup_with_multiple_locations(hass: HomeAssistantType) -> None:
    """Test setup with basic config with client location."""
    await setup_directv_with_locations(hass)
    await setup_integration(hass, aioclient_mock)

    assert hass.states.get(MAIN_ENTITY_ID)
    assert hass.states.get(CLIENT_ENTITY_ID)
    assert hass.states.get(UNAVAILABLE_ENTITY_ID)


async def test_unique_id(hass: HomeAssistantType) -> None:
async def test_unique_id(
    hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test unique id."""
    await setup_directv_with_locations(hass)
    await setup_integration(hass, aioclient_mock)

    entity_registry = await hass.helpers.entity_registry.async_get_registry()

@@ -198,10 +167,15 @@ async def test_unique_id(hass: HomeAssistantType) -> None:
    client = entity_registry.async_get(CLIENT_ENTITY_ID)
    assert client.unique_id == "2CA17D1CD30X"

    unavailable_client = entity_registry.async_get(UNAVAILABLE_ENTITY_ID)
    assert unavailable_client.unique_id == "9XXXXXXXXXX9"


async def test_supported_features(hass: HomeAssistantType) -> None:

async def test_supported_features(
    hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test supported features."""
    await setup_directv_with_locations(hass)
    await setup_integration(hass, aioclient_mock)

    # Features supported for main DVR
    state = hass.states.get(MAIN_ENTITY_ID)
@@ -231,168 +205,123 @@ async def test_supported_features(hass: HomeAssistantType) -> None:


async def test_check_attributes(
    hass: HomeAssistantType, mock_now: dt_util.dt.datetime
    hass: HomeAssistantType,
    mock_now: dt_util.dt.datetime,
    aioclient_mock: AiohttpClientMocker,
) -> None:
    """Test attributes."""
    await setup_directv_with_locations(hass)
    await setup_integration(hass, aioclient_mock)

    next_update = mock_now + timedelta(minutes=5)
    with patch("homeassistant.util.dt.utcnow", return_value=next_update):
        async_fire_time_changed(hass, next_update)
        await hass.async_block_till_done()
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state == STATE_PLAYING

    # Start playing TV
    with patch("homeassistant.util.dt.utcnow", return_value=next_update):
        await async_media_play(hass, CLIENT_ENTITY_ID)
        await hass.async_block_till_done()
    assert state.attributes.get(ATTR_MEDIA_CONTENT_ID) == "17016356"
    assert state.attributes.get(ATTR_MEDIA_CONTENT_TYPE) == MEDIA_TYPE_MOVIE
    assert state.attributes.get(ATTR_MEDIA_DURATION) == 7200
    assert state.attributes.get(ATTR_MEDIA_POSITION) == 4437
    assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT)
    assert state.attributes.get(ATTR_MEDIA_TITLE) == "Snow Bride"
    assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None
    assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("HALLHD", "312")
    assert state.attributes.get(ATTR_INPUT_SOURCE) == "312"
    assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING)
    assert state.attributes.get(ATTR_MEDIA_RATING) == "TV-G"
    assert not state.attributes.get(ATTR_MEDIA_RECORDED)
    assert state.attributes.get(ATTR_MEDIA_START_TIME) == datetime(
        2020, 3, 21, 13, 0, tzinfo=dt_util.UTC
    )

    state = hass.states.get(CLIENT_ENTITY_ID)
    assert state.state == STATE_PLAYING

    assert state.attributes.get(ATTR_MEDIA_CONTENT_ID) == RECORDING["programId"]
    assert state.attributes.get(ATTR_MEDIA_CONTENT_ID) == "4405732"
    assert state.attributes.get(ATTR_MEDIA_CONTENT_TYPE) == MEDIA_TYPE_TVSHOW
    assert state.attributes.get(ATTR_MEDIA_DURATION) == RECORDING["duration"]
    assert state.attributes.get(ATTR_MEDIA_POSITION) == 2
    assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT) == next_update
    assert state.attributes.get(ATTR_MEDIA_TITLE) == RECORDING["title"]
    assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) == RECORDING["episodeTitle"]
    assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format(
        RECORDING["callsign"], RECORDING["major"]
    )
    assert state.attributes.get(ATTR_INPUT_SOURCE) == RECORDING["major"]
    assert (
        state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING) == RECORDING["isRecording"]
    )
    assert state.attributes.get(ATTR_MEDIA_RATING) == RECORDING["rating"]
    assert state.attributes.get(ATTR_MEDIA_DURATION) == 1791
    assert state.attributes.get(ATTR_MEDIA_POSITION) == 263
    assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT)
    assert state.attributes.get(ATTR_MEDIA_TITLE) == "Tyler's Ultimate"
    assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) == "Spaghetti and Clam Sauce"
    assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("FOODHD", "231")
    assert state.attributes.get(ATTR_INPUT_SOURCE) == "231"
    assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING)
    assert state.attributes.get(ATTR_MEDIA_RATING) == "No Rating"
    assert state.attributes.get(ATTR_MEDIA_RECORDED)
    assert state.attributes.get(ATTR_MEDIA_START_TIME) == datetime(
        2018, 11, 10, 19, 0, tzinfo=dt_util.UTC
        2010, 7, 5, 15, 0, 8, tzinfo=dt_util.UTC
    )

    state = hass.states.get(UNAVAILABLE_ENTITY_ID)
    assert state.state == STATE_UNAVAILABLE


async def test_attributes_paused(
    hass: HomeAssistantType,
    mock_now: dt_util.dt.datetime,
    aioclient_mock: AiohttpClientMocker,
):
    """Test attributes while paused."""
    await setup_integration(hass, aioclient_mock)

    state = hass.states.get(CLIENT_ENTITY_ID)
    last_updated = state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT)

    # Test to make sure that ATTR_MEDIA_POSITION_UPDATED_AT is not
    # updated if TV is paused.
    with patch(
        "homeassistant.util.dt.utcnow", return_value=next_update + timedelta(minutes=5)
        "homeassistant.util.dt.utcnow", return_value=mock_now + timedelta(minutes=5)
    ):
        await async_media_pause(hass, CLIENT_ENTITY_ID)
        await hass.async_block_till_done()

    state = hass.states.get(CLIENT_ENTITY_ID)
    assert state.state == STATE_PAUSED
    assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT) == next_update
    assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT) == last_updated


async def test_main_services(
    hass: HomeAssistantType, mock_now: dt_util.dt.datetime
    hass: HomeAssistantType,
    mock_now: dt_util.dt.datetime,
    aioclient_mock: AiohttpClientMocker,
) -> None:
    """Test the different services."""
    await setup_directv(hass)
    await setup_integration(hass, aioclient_mock)

    next_update = mock_now + timedelta(minutes=5)
    with patch("homeassistant.util.dt.utcnow", return_value=next_update):
        async_fire_time_changed(hass, next_update)
    with patch("directv.DIRECTV.remote") as remote_mock:
        await async_turn_off(hass, MAIN_ENTITY_ID)
        await hass.async_block_till_done()
    # DVR starts in off state.
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state == STATE_OFF
        remote_mock.assert_called_once_with("poweroff", "0")

    # Turn main DVR on. When turning on DVR is playing.
    await async_turn_on(hass, MAIN_ENTITY_ID)
    await hass.async_block_till_done()
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state == STATE_PLAYING

    # Pause live TV.
    await async_media_pause(hass, MAIN_ENTITY_ID)
    await hass.async_block_till_done()
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state == STATE_PAUSED

    # Start play again for live TV.
    await async_media_play(hass, MAIN_ENTITY_ID)
    await hass.async_block_till_done()
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state == STATE_PLAYING

    # Change channel, currently it should be 202
    assert state.attributes.get("source") == 202
    await async_play_media(hass, "channel", 7, MAIN_ENTITY_ID)
    await hass.async_block_till_done()
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.attributes.get("source") == 7

    # Stop live TV.
    await async_media_stop(hass, MAIN_ENTITY_ID)
    await hass.async_block_till_done()
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state == STATE_PAUSED

    # Turn main DVR off.
    await async_turn_off(hass, MAIN_ENTITY_ID)
    await hass.async_block_till_done()
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state == STATE_OFF


async def test_available(
    hass: HomeAssistantType, mock_now: dt_util.dt.datetime
) -> None:
    """Test available status."""
    entry = await setup_directv(hass)

    next_update = mock_now + timedelta(minutes=5)
    with patch("homeassistant.util.dt.utcnow", return_value=next_update):
        async_fire_time_changed(hass, next_update)
    with patch("directv.DIRECTV.remote") as remote_mock:
        await async_turn_on(hass, MAIN_ENTITY_ID)
        await hass.async_block_till_done()
        remote_mock.assert_called_once_with("poweron", "0")

    # Confirm service is currently set to available.
    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state != STATE_UNAVAILABLE

    assert hass.data[DOMAIN]
    assert hass.data[DOMAIN][entry.entry_id]
    assert hass.data[DOMAIN][entry.entry_id]["client"]

    main_dtv = hass.data[DOMAIN][entry.entry_id]["client"]

    # Make update fail 1st time
    next_update = next_update + timedelta(minutes=5)
    with patch.object(main_dtv, "get_standby", side_effect=RequestException), patch(
        "homeassistant.util.dt.utcnow", return_value=next_update
    ):
        async_fire_time_changed(hass, next_update)
    with patch("directv.DIRECTV.remote") as remote_mock:
        await async_media_pause(hass, MAIN_ENTITY_ID)
        await hass.async_block_till_done()
        remote_mock.assert_called_once_with("pause", "0")

    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state != STATE_UNAVAILABLE

    # Make update fail 2nd time within 1 minute
    next_update = next_update + timedelta(seconds=30)
    with patch.object(main_dtv, "get_standby", side_effect=RequestException), patch(
        "homeassistant.util.dt.utcnow", return_value=next_update
    ):
        async_fire_time_changed(hass, next_update)
    with patch("directv.DIRECTV.remote") as remote_mock:
        await async_media_play(hass, MAIN_ENTITY_ID)
        await hass.async_block_till_done()
        remote_mock.assert_called_once_with("play", "0")

    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state != STATE_UNAVAILABLE

    # Make update fail 3rd time more then a minute after 1st failure
    next_update = next_update + timedelta(minutes=1)
    with patch.object(main_dtv, "get_standby", side_effect=RequestException), patch(
        "homeassistant.util.dt.utcnow", return_value=next_update
    ):
        async_fire_time_changed(hass, next_update)
    with patch("directv.DIRECTV.remote") as remote_mock:
        await async_media_next_track(hass, MAIN_ENTITY_ID)
        await hass.async_block_till_done()
        remote_mock.assert_called_once_with("ffwd", "0")

    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state == STATE_UNAVAILABLE

    # Recheck state, update should work again.
    next_update = next_update + timedelta(minutes=5)
    with patch("homeassistant.util.dt.utcnow", return_value=next_update):
        async_fire_time_changed(hass, next_update)
    with patch("directv.DIRECTV.remote") as remote_mock:
        await async_media_previous_track(hass, MAIN_ENTITY_ID)
        await hass.async_block_till_done()
        remote_mock.assert_called_once_with("rew", "0")

    state = hass.states.get(MAIN_ENTITY_ID)
    assert state.state != STATE_UNAVAILABLE
    with patch("directv.DIRECTV.remote") as remote_mock:
        await async_media_stop(hass, MAIN_ENTITY_ID)
        await hass.async_block_till_done()
        remote_mock.assert_called_once_with("stop", "0")

    with patch("directv.DIRECTV.tune") as tune_mock:
        await async_play_media(hass, "channel", 312, MAIN_ENTITY_ID)
        await hass.async_block_till_done()
        tune_mock.assert_called_once_with("312", "0")

@@ -140,10 +140,33 @@ async def test_form_zeroconf_wrong_oui(hass):
        context={"source": config_entries.SOURCE_ZEROCONF},
        data={
            "properties": {"macaddress": "notdoorbirdoui"},
            "host": "192.168.1.8",
            "name": "Doorstation - abc123._axis-video._tcp.local.",
        },
    )
    assert result["type"] == "abort"
    assert result["reason"] == "not_doorbird_device"


async def test_form_zeroconf_link_local_ignored(hass):
    """Test we abort when we get a link local address via zeroconf."""
    await hass.async_add_executor_job(
        init_recorder_component, hass
    )  # force in memory db

    await setup.async_setup_component(hass, "persistent_notification", {})

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_ZEROCONF},
        data={
            "properties": {"macaddress": "1CCAE3DOORBIRD"},
            "host": "169.254.103.61",
            "name": "Doorstation - abc123._axis-video._tcp.local.",
        },
    )
    assert result["type"] == "abort"
    assert result["reason"] == "link_local_address"


async def test_form_zeroconf_correct_oui(hass):

@@ -442,6 +442,9 @@ async def test_execute(hass):
        "source": "cloud",
    }

    service_events = sorted(
        service_events, key=lambda ev: ev.data["service_data"]["entity_id"]
    )
    assert len(service_events) == 4
    assert service_events[0].data == {
        "domain": "light",

@@ -5,7 +5,9 @@ from unittest.mock import patch
import pytest

from homeassistant.components.climate.const import (
    ATTR_CURRENT_HUMIDITY,
    ATTR_CURRENT_TEMPERATURE,
    ATTR_HUMIDITY,
    ATTR_HVAC_ACTION,
    ATTR_HVAC_MODE,
    ATTR_HVAC_MODES,
@@ -18,6 +20,7 @@ from homeassistant.components.climate.const import (
    CURRENT_HVAC_HEAT,
    CURRENT_HVAC_IDLE,
    DEFAULT_MAX_TEMP,
    DEFAULT_MIN_HUMIDITY,
    DEFAULT_MIN_TEMP,
    DOMAIN as DOMAIN_CLIMATE,
    HVAC_MODE_AUTO,
@@ -99,6 +102,8 @@ async def test_thermostat(hass, hk_driver, cls, events):
    assert acc.char_display_units.value == 0
    assert acc.char_cooling_thresh_temp is None
    assert acc.char_heating_thresh_temp is None
    assert acc.char_target_humidity is None
    assert acc.char_current_humidity is None

    assert acc.char_target_temp.properties[PROP_MAX_VALUE] == DEFAULT_MAX_TEMP
    assert acc.char_target_temp.properties[PROP_MIN_VALUE] == DEFAULT_MIN_TEMP
@@ -357,6 +362,49 @@ async def test_thermostat_auto(hass, hk_driver, cls, events):
    assert events[-1].data[ATTR_VALUE] == "cooling threshold 25.0°C"


async def test_thermostat_humidity(hass, hk_driver, cls, events):
    """Test if accessory and HA are updated accordingly with humidity."""
    entity_id = "climate.test"

    # support_auto = True
    hass.states.async_set(entity_id, HVAC_MODE_OFF, {ATTR_SUPPORTED_FEATURES: 4})
    await hass.async_block_till_done()
    acc = cls.thermostat(hass, hk_driver, "Climate", entity_id, 2, None)
    await hass.async_add_job(acc.run)
    await hass.async_block_till_done()

    assert acc.char_target_humidity.value == 50
    assert acc.char_current_humidity.value == 50

    assert acc.char_target_humidity.properties[PROP_MIN_VALUE] == DEFAULT_MIN_HUMIDITY

    hass.states.async_set(
        entity_id, HVAC_MODE_HEAT_COOL, {ATTR_HUMIDITY: 65, ATTR_CURRENT_HUMIDITY: 40},
    )
    await hass.async_block_till_done()
    assert acc.char_current_humidity.value == 40
    assert acc.char_target_humidity.value == 65

    hass.states.async_set(
        entity_id, HVAC_MODE_COOL, {ATTR_HUMIDITY: 35, ATTR_CURRENT_HUMIDITY: 70},
    )
    await hass.async_block_till_done()
    assert acc.char_current_humidity.value == 70
    assert acc.char_target_humidity.value == 35

    # Set from HomeKit
    call_set_humidity = async_mock_service(hass, DOMAIN_CLIMATE, "set_humidity")

    await hass.async_add_job(acc.char_target_humidity.client_update_value, 35)
    await hass.async_block_till_done()
    assert call_set_humidity[0]
    assert call_set_humidity[0].data[ATTR_ENTITY_ID] == entity_id
    assert call_set_humidity[0].data[ATTR_HUMIDITY] == 35
    assert acc.char_target_humidity.value == 35
    assert len(events) == 1
    assert events[-1].data[ATTR_VALUE] == "35%"


async def test_thermostat_power_state(hass, hk_driver, cls, events):
    """Test if accessory and HA are updated accordingly."""
    entity_id = "climate.test"

@@ -99,7 +99,7 @@ async def test_reset_unloads_entry_if_setup(hass):

async def test_handle_unauthorized(hass):
    """Test handling an unauthorized error on update."""
    entry = Mock()
    entry = Mock(async_setup=Mock(return_value=mock_coro(Mock())))
    entry.data = {"host": "1.2.3.4", "username": "mock-username"}
    hue_bridge = bridge.HueBridge(hass, entry, False, False)

@@ -193,17 +193,15 @@ async def test_security_vuln_check(hass):
    entry = MockConfigEntry(domain=hue.DOMAIN, data={"host": "0.0.0.0"})
    entry.add_to_hass(hass)

    config = Mock(bridgeid="", mac="", modelid="BSB002", swversion="1935144020")
    config.name = "Hue"

    with patch.object(
        hue,
        "HueBridge",
        Mock(
            return_value=Mock(
                async_setup=CoroutineMock(return_value=True),
                api=Mock(
                    config=Mock(
                        bridgeid="", mac="", modelid="BSB002", swversion="1935144020"
                    )
                ),
                async_setup=CoroutineMock(return_value=True), api=Mock(config=config)
            )
        ),
    ):

@@ -893,7 +893,7 @@ async def test_group_features(hass, mock_bridge):
    "modelid": "LCT001",
    "swversion": "66009461",
    "manufacturername": "Philips",
    "uniqueid": "456",
    "uniqueid": "4567",
}
light_3 = {
    "state": {
@@ -945,7 +945,7 @@ async def test_group_features(hass, mock_bridge):
    "modelid": "LCT001",
    "swversion": "66009461",
    "manufacturername": "Philips",
    "uniqueid": "123",
    "uniqueid": "1234",
}
light_response = {
    "1": light_1,

@@ -11,6 +11,7 @@ import pytest
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components import hue
|
||||
from homeassistant.components.hue import sensor_base as hue_sensor_base
|
||||
from homeassistant.components.hue.hue_event import CONF_HUE_EVENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -241,6 +242,33 @@ UNSUPPORTED_SENSOR = {
|
||||
"uniqueid": "arbitrary",
|
||||
"recycle": True,
|
||||
}
|
||||
HUE_TAP_REMOTE_1 = {
|
||||
"state": {"buttonevent": 17, "lastupdated": "2019-06-22T14:43:50"},
|
||||
"swupdate": {"state": "notupdatable", "lastinstall": None},
|
||||
"config": {"on": True},
|
||||
"name": "Hue Tap",
|
||||
"type": "ZGPSwitch",
|
||||
"modelid": "ZGPSWITCH",
|
||||
"manufacturername": "Philips",
|
||||
"productname": "Hue tap switch",
|
||||
"diversityid": "d8cde5d5-0eef-4b95-b0f0-71ddd2952af4",
|
||||
"uniqueid": "00:00:00:00:00:44:23:08-f2",
|
||||
"capabilities": {"certified": True, "primary": True, "inputs": []},
|
||||
}
|
||||
HUE_DIMMER_REMOTE_1 = {
|
||||
"state": {"buttonevent": 4002, "lastupdated": "2019-12-28T21:58:02"},
|
||||
"swupdate": {"state": "noupdates", "lastinstall": "2019-10-13T13:16:15"},
|
||||
"config": {"on": True, "battery": 100, "reachable": True, "pending": []},
|
||||
"name": "Hue dimmer switch 1",
|
||||
"type": "ZLLSwitch",
|
||||
"modelid": "RWL021",
|
||||
"manufacturername": "Philips",
|
||||
"productname": "Hue dimmer switch",
|
||||
"diversityid": "73bbabea-3420-499a-9856-46bf437e119b",
|
||||
"swversion": "6.1.1.28573",
|
||||
"uniqueid": "00:17:88:01:10:3e:3a:dc-02-fc00",
|
||||
"capabilities": {"certified": True, "primary": True, "inputs": []},
|
||||
}
|
||||
SENSOR_RESPONSE = {
|
||||
"1": PRESENCE_SENSOR_1_PRESENT,
|
||||
"2": LIGHT_LEVEL_SENSOR_1,
|
||||
@@ -248,6 +276,8 @@ SENSOR_RESPONSE = {
|
||||
"4": PRESENCE_SENSOR_2_NOT_PRESENT,
|
||||
"5": LIGHT_LEVEL_SENSOR_2,
|
||||
"6": TEMPERATURE_SENSOR_2,
|
||||
"7": HUE_TAP_REMOTE_1,
|
||||
"8": HUE_DIMMER_REMOTE_1,
|
||||
}
|
||||
|
||||
|
||||
@@ -341,8 +371,8 @@ async def test_sensors_with_multiple_bridges(hass, mock_bridge):
|
||||
|
||||
assert len(mock_bridge.mock_requests) == 1
|
||||
assert len(mock_bridge_2.mock_requests) == 1
|
||||
# 3 "physical" sensors with 3 virtual sensors each
|
||||
assert len(hass.states.async_all()) == 9
|
||||
# 3 "physical" sensors with 3 virtual sensors each + 1 battery sensor
|
||||
assert len(hass.states.async_all()) == 10
|
||||
|
||||
|
||||
async def test_sensors(hass, mock_bridge):
|
||||
@@ -351,7 +381,7 @@ async def test_sensors(hass, mock_bridge):
|
||||
await setup_bridge(hass, mock_bridge)
|
||||
assert len(mock_bridge.mock_requests) == 1
|
||||
# 2 "physical" sensors with 3 virtual sensors each
|
||||
assert len(hass.states.async_all()) == 6
|
||||
assert len(hass.states.async_all()) == 7
|
||||
|
||||
presence_sensor_1 = hass.states.get("binary_sensor.living_room_sensor_motion")
|
||||
light_level_sensor_1 = hass.states.get("sensor.living_room_sensor_light_level")
|
||||
@@ -377,6 +407,11 @@ async def test_sensors(hass, mock_bridge):
|
||||
assert temperature_sensor_2.state == "18.75"
|
||||
assert temperature_sensor_2.name == "Kitchen sensor temperature"
|
||||
|
||||
battery_remote_1 = hass.states.get("sensor.hue_dimmer_switch_1_battery_level")
|
||||
assert battery_remote_1 is not None
|
||||
assert battery_remote_1.state == "100"
|
||||
assert battery_remote_1.name == "Hue dimmer switch 1 battery level"
|
||||
|
||||
|
||||
async def test_unsupported_sensors(hass, mock_bridge):
|
||||
"""Test that unsupported sensors don't get added and don't fail."""
|
||||
@@ -385,8 +420,8 @@ async def test_unsupported_sensors(hass, mock_bridge):
|
||||
mock_bridge.mock_sensor_responses.append(response_with_unsupported)
|
||||
await setup_bridge(hass, mock_bridge)
|
||||
assert len(mock_bridge.mock_requests) == 1
|
||||
# 2 "physical" sensors with 3 virtual sensors each
|
||||
assert len(hass.states.async_all()) == 6
|
||||
# 2 "physical" sensors with 3 virtual sensors each + 1 battery sensor
|
||||
assert len(hass.states.async_all()) == 7
|
||||
|
||||
|
||||
async def test_new_sensor_discovered(hass, mock_bridge):
|
||||
@@ -395,14 +430,14 @@ async def test_new_sensor_discovered(hass, mock_bridge):
|
||||
|
||||
await setup_bridge(hass, mock_bridge)
|
||||
assert len(mock_bridge.mock_requests) == 1
|
||||
assert len(hass.states.async_all()) == 6
|
||||
assert len(hass.states.async_all()) == 7
|
||||
|
||||
new_sensor_response = dict(SENSOR_RESPONSE)
|
||||
new_sensor_response.update(
|
||||
{
|
||||
"7": PRESENCE_SENSOR_3_PRESENT,
|
||||
"8": LIGHT_LEVEL_SENSOR_3,
|
||||
"9": TEMPERATURE_SENSOR_3,
|
||||
"9": PRESENCE_SENSOR_3_PRESENT,
|
||||
"10": LIGHT_LEVEL_SENSOR_3,
|
||||
"11": TEMPERATURE_SENSOR_3,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -413,7 +448,7 @@ async def test_new_sensor_discovered(hass, mock_bridge):
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(mock_bridge.mock_requests) == 2
|
||||
assert len(hass.states.async_all()) == 9
|
||||
assert len(hass.states.async_all()) == 10
|
||||
|
||||
presence = hass.states.get("binary_sensor.bedroom_sensor_motion")
|
||||
assert presence is not None
|
||||
@@ -429,7 +464,7 @@ async def test_sensor_removed(hass, mock_bridge):
|
||||
|
||||
await setup_bridge(hass, mock_bridge)
|
||||
assert len(mock_bridge.mock_requests) == 1
|
||||
assert len(hass.states.async_all()) == 6
|
||||
assert len(hass.states.async_all()) == 7
|
||||
|
||||
mock_bridge.mock_sensor_responses.clear()
|
||||
keys = ("1", "2", "3")
|
||||
@@ -466,3 +501,121 @@ async def test_update_unauthorized(hass, mock_bridge):
|
||||
assert len(mock_bridge.mock_requests) == 0
|
||||
assert len(hass.states.async_all()) == 0
|
||||
assert len(mock_bridge.handle_unauthorized_error.mock_calls) == 1
|
||||
|
||||
|
||||
async def test_hue_events(hass, mock_bridge):
|
||||
"""Test that hue remotes fire events when pressed."""
|
||||
mock_bridge.mock_sensor_responses.append(SENSOR_RESPONSE)
|
||||
|
||||
mock_listener = Mock()
|
||||
unsub = hass.bus.async_listen(CONF_HUE_EVENT, mock_listener)
|
||||
|
||||
await setup_bridge(hass, mock_bridge)
|
||||
assert len(mock_bridge.mock_requests) == 1
|
||||
assert len(hass.states.async_all()) == 7
|
||||
assert len(mock_listener.mock_calls) == 0
|
||||
|
||||
new_sensor_response = dict(SENSOR_RESPONSE)
|
||||
new_sensor_response["7"]["state"] = {
|
||||
"buttonevent": 18,
|
||||
"lastupdated": "2019-12-28T22:58:02",
|
||||
}
|
||||
mock_bridge.mock_sensor_responses.append(new_sensor_response)
|
||||
|
||||
# Force updates to run again
|
||||
await mock_bridge.sensor_manager.coordinator.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(mock_bridge.mock_requests) == 2
|
||||
assert len(hass.states.async_all()) == 7
|
||||
assert len(mock_listener.mock_calls) == 1
|
||||
assert mock_listener.mock_calls[0][1][0].data == {
|
||||
"id": "hue_tap",
|
||||
"unique_id": "00:00:00:00:00:44:23:08-f2",
|
||||
"event": 18,
|
||||
"last_updated": "2019-12-28T22:58:02",
|
||||
}
|
||||
|
||||
new_sensor_response = dict(new_sensor_response)
|
||||
new_sensor_response["8"]["state"] = {
|
||||
"buttonevent": 3002,
|
||||
"lastupdated": "2019-12-28T22:58:01",
|
||||
}
|
||||
mock_bridge.mock_sensor_responses.append(new_sensor_response)
|
||||
|
||||
# Force updates to run again
|
||||
await mock_bridge.sensor_manager.coordinator.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(mock_bridge.mock_requests) == 3
|
||||
assert len(hass.states.async_all()) == 7
|
||||
assert len(mock_listener.mock_calls) == 2
|
||||
assert mock_listener.mock_calls[1][1][0].data == {
|
||||
"id": "hue_dimmer_switch_1",
|
||||
"unique_id": "00:17:88:01:10:3e:3a:dc-02-fc00",
|
||||
"event": 3002,
|
||||
"last_updated": "2019-12-28T22:58:01",
|
||||
}
|
||||
|
||||
# Add a new remote. In discovery the new event is registered **but not fired**
|
||||
new_sensor_response = dict(new_sensor_response)
|
||||
new_sensor_response["21"] = {
|
||||
"state": {
|
||||
"rotaryevent": 2,
|
||||
"expectedrotation": 208,
|
||||
"expectedeventduration": 400,
|
||||
"lastupdated": "2020-01-31T15:56:19",
|
||||
},
|
||||
"swupdate": {"state": "noupdates", "lastinstall": "2019-11-26T03:35:21"},
|
||||
"config": {"on": True, "battery": 100, "reachable": True, "pending": []},
|
||||
"name": "Lutron Aurora 1",
|
||||
"type": "ZLLRelativeRotary",
|
||||
"modelid": "Z3-1BRL",
|
||||
"manufacturername": "Lutron",
|
||||
"productname": "Lutron Aurora",
|
||||
"diversityid": "2c3a75ff-55c4-4e4d-8c44-82d330b8eb9b",
|
||||
"swversion": "3.4",
|
||||
"uniqueid": "ff:ff:00:0f:e7:fd:bc:b7-01-fc00-0014",
|
||||
"capabilities": {
|
||||
"certified": True,
|
||||
"primary": True,
|
||||
"inputs": [
|
||||
{
|
||||
"repeatintervals": [400],
|
||||
"events": [
|
||||
{"rotaryevent": 1, "eventtype": "start"},
|
||||
{"rotaryevent": 2, "eventtype": "repeat"},
|
||||
],
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
mock_bridge.mock_sensor_responses.append(new_sensor_response)
|
||||
|
||||
# Force updates to run again
|
||||
await mock_bridge.sensor_manager.coordinator.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(mock_bridge.mock_requests) == 4
|
||||
assert len(hass.states.async_all()) == 8
|
||||
assert len(mock_listener.mock_calls) == 2
|
||||
|
||||
# A new press fires the event
|
||||
new_sensor_response["21"]["state"]["lastupdated"] = "2020-01-31T15:57:19"
|
||||
mock_bridge.mock_sensor_responses.append(new_sensor_response)
|
||||
|
||||
# Force updates to run again
|
||||
await mock_bridge.sensor_manager.coordinator.async_refresh()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(mock_bridge.mock_requests) == 5
|
||||
assert len(hass.states.async_all()) == 8
|
||||
assert len(mock_listener.mock_calls) == 3
|
||||
assert mock_listener.mock_calls[2][1][0].data == {
|
||||
"id": "lutron_aurora_1",
|
||||
"unique_id": "ff:ff:00:0f:e7:fd:bc:b7-01-fc00-0014",
|
||||
"event": 2,
|
||||
"last_updated": "2020-01-31T15:57:19",
|
||||
}
|
||||
|
||||
unsub()
|
||||
|
@@ -450,6 +450,7 @@ async def test_import_existing_config(hass, mock_panel):
|
||||
"alarm1": "Switchable Output",
|
||||
},
|
||||
"blink": True,
|
||||
"api_host": "",
|
||||
"discovery": True,
|
||||
"binary_sensors": [
|
||||
{"zone": "2", "type": "door", "inverse": False},
|
||||
@@ -628,6 +629,7 @@ async def test_import_pin_config(hass, mock_panel):
|
||||
"out": "Switchable Output",
|
||||
},
|
||||
"blink": True,
|
||||
"api_host": "",
|
||||
"discovery": True,
|
||||
"binary_sensors": [
|
||||
{"zone": "1", "type": "door", "inverse": False},
|
||||
@@ -778,9 +780,21 @@ async def test_option_flow(hass, mock_panel):
|
||||
|
||||
assert result["type"] == "form"
|
||||
assert result["step_id"] == "options_misc"
|
||||
|
||||
# make sure we enforce url format
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"], user_input={"blink": True},
|
||||
result["flow_id"],
|
||||
user_input={"blink": True, "override_api_host": True, "api_host": "badhosturl"},
|
||||
)
|
||||
|
||||
assert result["type"] == "form"
|
||||
assert result["step_id"] == "options_misc"
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"],
|
||||
user_input={
|
||||
"blink": True,
|
||||
"override_api_host": True,
|
||||
"api_host": "http://overridehost:1111",
|
||||
},
|
||||
)
|
||||
assert result["type"] == "create_entry"
|
||||
assert result["data"] == {
|
||||
@@ -792,6 +806,7 @@ async def test_option_flow(hass, mock_panel):
|
||||
"out": "Switchable Output",
|
||||
},
|
||||
"blink": True,
|
||||
"api_host": "http://overridehost:1111",
|
||||
"binary_sensors": [
|
||||
{"zone": "2", "type": "door", "inverse": False},
|
||||
{"zone": "6", "type": "window", "name": "winder", "inverse": True},
|
||||
@@ -958,7 +973,7 @@ async def test_option_flow_pro(hass, mock_panel):
|
||||
assert result["step_id"] == "options_misc"
|
||||
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"], user_input={"blink": True},
|
||||
result["flow_id"], user_input={"blink": True, "override_api_host": False},
|
||||
)
|
||||
|
||||
assert result["type"] == "create_entry"
|
||||
@@ -976,6 +991,7 @@ async def test_option_flow_pro(hass, mock_panel):
|
||||
"out1": "Switchable Output",
|
||||
},
|
||||
"blink": True,
|
||||
"api_host": "",
|
||||
"binary_sensors": [
|
||||
{"zone": "2", "type": "door", "inverse": False},
|
||||
{"zone": "6", "type": "window", "name": "winder", "inverse": True},
|
||||
@@ -1121,7 +1137,7 @@ async def test_option_flow_import(hass, mock_panel):
|
||||
schema = result["data_schema"]({})
|
||||
assert schema["blink"] is True
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"], user_input={"blink": False},
|
||||
result["flow_id"], user_input={"blink": False, "override_api_host": False},
|
||||
)
|
||||
|
||||
# verify the updated fields
|
||||
@@ -1129,6 +1145,7 @@ async def test_option_flow_import(hass, mock_panel):
|
||||
assert result["data"] == {
|
||||
"io": {"1": "Binary Sensor", "2": "Digital Sensor", "3": "Switchable Output"},
|
||||
"blink": False,
|
||||
"api_host": "",
|
||||
"binary_sensors": [
|
||||
{"zone": "1", "type": "door", "inverse": True, "name": "winder"},
|
||||
],
|
||||
|
@@ -43,6 +43,7 @@ async def test_config_schema(hass):
|
||||
"""Test that config schema is imported properly."""
|
||||
config = {
|
||||
konnected.DOMAIN: {
|
||||
konnected.CONF_API_HOST: "http://1.1.1.1:8888",
|
||||
konnected.CONF_ACCESS_TOKEN: "abcdefgh",
|
||||
konnected.CONF_DEVICES: [{konnected.CONF_ID: "aabbccddeeff"}],
|
||||
}
|
||||
@@ -50,10 +51,12 @@ async def test_config_schema(hass):
|
||||
assert konnected.CONFIG_SCHEMA(config) == {
|
||||
"konnected": {
|
||||
"access_token": "abcdefgh",
|
||||
"api_host": "http://1.1.1.1:8888",
|
||||
"devices": [
|
||||
{
|
||||
"default_options": {
|
||||
"blink": True,
|
||||
"api_host": "http://1.1.1.1:8888",
|
||||
"discovery": True,
|
||||
"io": {
|
||||
"1": "Disabled",
|
||||
@@ -96,6 +99,7 @@ async def test_config_schema(hass):
|
||||
{
|
||||
"default_options": {
|
||||
"blink": True,
|
||||
"api_host": "",
|
||||
"discovery": True,
|
||||
"io": {
|
||||
"1": "Disabled",
|
||||
@@ -162,6 +166,7 @@ async def test_config_schema(hass):
|
||||
{
|
||||
"default_options": {
|
||||
"blink": True,
|
||||
"api_host": "",
|
||||
"discovery": True,
|
||||
"io": {
|
||||
"1": "Binary Sensor",
|
||||
|
tests/components/myq/test_binary_sensor.py (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
"""The scene tests for the myq platform."""
|
||||
|
||||
from homeassistant.const import STATE_ON
|
||||
|
||||
from .util import async_init_integration
|
||||
|
||||
|
||||
async def test_create_binary_sensors(hass):
|
||||
"""Test creation of binary_sensors."""
|
||||
|
||||
await async_init_integration(hass)
|
||||
|
||||
state = hass.states.get("binary_sensor.happy_place_myq_gateway")
|
||||
assert state.state == STATE_ON
|
||||
expected_attributes = {"device_class": "connectivity"}
|
||||
# Only test for a subset of attributes in case
|
||||
# HA changes the implementation and a new one appears
|
||||
assert all(
|
||||
state.attributes[key] == expected_attributes[key] for key in expected_attributes
|
||||
)
|
tests/components/myq/test_cover.py (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
"""The scene tests for the myq platform."""
|
||||
|
||||
from homeassistant.const import STATE_CLOSED
|
||||
|
||||
from .util import async_init_integration
|
||||
|
||||
|
||||
async def test_create_covers(hass):
|
||||
"""Test creation of covers."""
|
||||
|
||||
await async_init_integration(hass)
|
||||
|
||||
state = hass.states.get("cover.large_garage_door")
|
||||
assert state.state == STATE_CLOSED
|
||||
expected_attributes = {
|
||||
"device_class": "garage",
|
||||
"friendly_name": "Large Garage Door",
|
||||
"supported_features": 3,
|
||||
}
|
||||
# Only test for a subset of attributes in case
|
||||
# HA changes the implementation and a new one appears
|
||||
assert all(
|
||||
state.attributes[key] == expected_attributes[key] for key in expected_attributes
|
||||
)
|
||||
|
||||
state = hass.states.get("cover.small_garage_door")
|
||||
assert state.state == STATE_CLOSED
|
||||
expected_attributes = {
|
||||
"device_class": "garage",
|
||||
"friendly_name": "Small Garage Door",
|
||||
"supported_features": 3,
|
||||
}
|
||||
# Only test for a subset of attributes in case
|
||||
# HA changes the implementation and a new one appears
|
||||
assert all(
|
||||
state.attributes[key] == expected_attributes[key] for key in expected_attributes
|
||||
)
|
||||
|
||||
state = hass.states.get("cover.gate")
|
||||
assert state.state == STATE_CLOSED
|
||||
expected_attributes = {
|
||||
"device_class": "gate",
|
||||
"friendly_name": "Gate",
|
||||
"supported_features": 3,
|
||||
}
|
||||
# Only test for a subset of attributes in case
|
||||
# HA changes the implementation and a new one appears
|
||||
assert all(
|
||||
state.attributes[key] == expected_attributes[key] for key in expected_attributes
|
||||
)
|
tests/components/myq/util.py (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
"""Tests for the myq integration."""
|
||||
|
||||
import json
|
||||
|
||||
from asynctest import patch
|
||||
|
||||
from homeassistant.components.myq.const import DOMAIN
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry, load_fixture
|
||||
|
||||
|
||||
async def async_init_integration(
|
||||
hass: HomeAssistant, skip_setup: bool = False,
|
||||
) -> MockConfigEntry:
|
||||
"""Set up the myq integration in Home Assistant."""
|
||||
|
||||
devices_fixture = "myq/devices.json"
|
||||
devices_json = load_fixture(devices_fixture)
|
||||
devices_dict = json.loads(devices_json)
|
||||
|
||||
def _handle_mock_api_request(method, endpoint, **kwargs):
|
||||
if endpoint == "Login":
|
||||
return {"SecurityToken": 1234}
|
||||
elif endpoint == "My":
|
||||
return {"Account": {"Id": 1}}
|
||||
elif endpoint == "Accounts/1/Devices":
|
||||
return devices_dict
|
||||
return {}
|
||||
|
||||
with patch("pymyq.api.API.request", side_effect=_handle_mock_api_request):
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
if not skip_setup:
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
return entry
|
@@ -23,6 +23,7 @@ def _get_mock_thermostat_run():
|
||||
schedule_mode=SCHEDULE_RUN,
|
||||
target_celsius=22,
|
||||
target_fahrenheit=72,
|
||||
target_temperature=2217,
|
||||
)
|
||||
|
||||
thermostat.get_data = Mock()
|
||||
@@ -48,6 +49,7 @@ def _get_mock_thermostat_schedule_hold_unavailable():
|
||||
schedule_mode=SCHEDULE_HOLD,
|
||||
target_celsius=23,
|
||||
target_fahrenheit=79,
|
||||
target_temperature=2609,
|
||||
)
|
||||
|
||||
thermostat.get_data = Mock()
|
||||
@@ -73,6 +75,7 @@ def _get_mock_thermostat_schedule_hold_available():
|
||||
schedule_mode=SCHEDULE_HOLD,
|
||||
target_celsius=23,
|
||||
target_fahrenheit=79,
|
||||
target_temperature=2609,
|
||||
)
|
||||
|
||||
thermostat.get_data = Mock()
|
||||
@@ -98,6 +101,7 @@ def _get_mock_thermostat_schedule_temporary_hold():
|
||||
schedule_mode=SCHEDULE_TEMPORARY_HOLD,
|
||||
target_celsius=43,
|
||||
target_fahrenheit=99,
|
||||
target_temperature=3729,
|
||||
)
|
||||
|
||||
thermostat.get_data = Mock()
|
||||
|
@@ -16,17 +16,28 @@ async def _mock_powerwall_with_fixtures(hass):
|
||||
meters = await _async_load_json_fixture(hass, "meters.json")
|
||||
sitemaster = await _async_load_json_fixture(hass, "sitemaster.json")
|
||||
site_info = await _async_load_json_fixture(hass, "site_info.json")
|
||||
status = await _async_load_json_fixture(hass, "status.json")
|
||||
device_type = await _async_load_json_fixture(hass, "device_type.json")
|
||||
|
||||
return _mock_powerwall_return_value(
|
||||
site_info=site_info,
|
||||
charge=47.31993232,
|
||||
sitemaster=sitemaster,
|
||||
meters=meters,
|
||||
grid_status="SystemGridConnected",
|
||||
status=status,
|
||||
device_type=device_type,
|
||||
)
|
||||
|
||||
|
||||
def _mock_powerwall_return_value(
|
||||
site_info=None, charge=None, sitemaster=None, meters=None, grid_status=None
|
||||
site_info=None,
|
||||
charge=None,
|
||||
sitemaster=None,
|
||||
meters=None,
|
||||
grid_status=None,
|
||||
status=None,
|
||||
device_type=None,
|
||||
):
|
||||
powerwall_mock = MagicMock()
|
||||
type(powerwall_mock).site_info = PropertyMock(return_value=site_info)
|
||||
@@ -34,6 +45,8 @@ def _mock_powerwall_return_value(
|
||||
type(powerwall_mock).sitemaster = PropertyMock(return_value=sitemaster)
|
||||
type(powerwall_mock).meters = PropertyMock(return_value=meters)
|
||||
type(powerwall_mock).grid_status = PropertyMock(return_value=grid_status)
|
||||
type(powerwall_mock).status = PropertyMock(return_value=status)
|
||||
type(powerwall_mock).device_type = PropertyMock(return_value=device_type)
|
||||
|
||||
return powerwall_mock
|
||||
|
||||
|
@@ -27,7 +27,8 @@ async def test_sensors(hass):
|
||||
identifiers={("powerwall", "Wom Energy_60Hz_240V_s_IEEE1547a_2014_13.5")},
|
||||
connections=set(),
|
||||
)
|
||||
assert reg_device.model == "PowerWall 2"
|
||||
assert reg_device.model == "PowerWall 2 (hec)"
|
||||
assert reg_device.sw_version == "1.45.1"
|
||||
assert reg_device.manufacturer == "Tesla"
|
||||
assert reg_device.name == "MySite"
|
||||
|
||||
|
@@ -57,3 +57,32 @@ async def test_heater(hass):
|
||||
# Only test for a subset of attributes in case
|
||||
# HA changes the implementation and a new one appears
|
||||
assert all(item in state.attributes.items() for item in expected_attributes.items())
|
||||
|
||||
|
||||
async def test_smartac_with_swing(hass):
|
||||
"""Test creation of smart ac with swing climate."""
|
||||
|
||||
await async_init_integration(hass)
|
||||
|
||||
state = hass.states.get("climate.air_conditioning_with_swing")
|
||||
assert state.state == "auto"
|
||||
|
||||
expected_attributes = {
|
||||
"current_humidity": 42.3,
|
||||
"current_temperature": 20.9,
|
||||
"fan_mode": "auto",
|
||||
"fan_modes": ["auto", "high", "medium", "low"],
|
||||
"friendly_name": "Air Conditioning with swing",
|
||||
"hvac_action": "heating",
|
||||
"hvac_modes": ["off", "auto", "heat", "cool", "heat_cool", "dry", "fan_only"],
|
||||
"max_temp": 30.0,
|
||||
"min_temp": 16.0,
|
||||
"preset_mode": "home",
|
||||
"preset_modes": ["away", "home"],
|
||||
"supported_features": 57,
|
||||
"target_temp_step": 1.0,
|
||||
"temperature": 20.0,
|
||||
}
|
||||
# Only test for a subset of attributes in case
|
||||
# HA changes the implementation and a new one appears
|
||||
assert all(item in state.attributes.items() for item in expected_attributes.items())
|
||||
|
@@ -19,6 +19,11 @@ async def async_init_integration(
|
||||
devices_fixture = "tado/devices.json"
|
||||
me_fixture = "tado/me.json"
|
||||
zones_fixture = "tado/zones.json"
|
||||
|
||||
# Smart AC with Swing
|
||||
zone_5_state_fixture = "tado/smartac3.with_swing.json"
|
||||
zone_5_capabilities_fixture = "tado/zone_with_swing_capabilities.json"
|
||||
|
||||
# Water Heater 2
|
||||
zone_4_state_fixture = "tado/tadov2.water_heater.heating.json"
|
||||
zone_4_capabilities_fixture = "tado/water_heater_zone_capabilities.json"
|
||||
@@ -31,6 +36,7 @@ async def async_init_integration(
|
||||
zone_2_state_fixture = "tado/tadov2.water_heater.auto_mode.json"
|
||||
zone_2_capabilities_fixture = "tado/water_heater_zone_capabilities.json"
|
||||
|
||||
# Tado V2 with manual heating
|
||||
zone_1_state_fixture = "tado/tadov2.heating.manual_mode.json"
|
||||
zone_1_capabilities_fixture = "tado/tadov2.zone_capabilities.json"
|
||||
|
||||
@@ -47,6 +53,10 @@ async def async_init_integration(
|
||||
"https://my.tado.com/api/v2/homes/1/zones",
|
||||
text=load_fixture(zones_fixture),
|
||||
)
|
||||
m.get(
|
||||
"https://my.tado.com/api/v2/homes/1/zones/5/capabilities",
|
||||
text=load_fixture(zone_5_capabilities_fixture),
|
||||
)
|
||||
m.get(
|
||||
"https://my.tado.com/api/v2/homes/1/zones/4/capabilities",
|
||||
text=load_fixture(zone_4_capabilities_fixture),
|
||||
@@ -63,6 +73,10 @@ async def async_init_integration(
|
||||
"https://my.tado.com/api/v2/homes/1/zones/1/capabilities",
|
||||
text=load_fixture(zone_1_capabilities_fixture),
|
||||
)
|
||||
m.get(
|
||||
"https://my.tado.com/api/v2/homes/1/zones/5/state",
|
||||
text=load_fixture(zone_5_state_fixture),
|
||||
)
|
||||
m.get(
|
||||
"https://my.tado.com/api/v2/homes/1/zones/4/state",
|
||||
text=load_fixture(zone_4_state_fixture),
|
||||
|
tests/fixtures/directv/info-get-locations.json (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"locations": [
|
||||
{
|
||||
"clientAddr": "0",
|
||||
"locationName": "Host"
|
||||
},
|
||||
{
|
||||
"clientAddr": "2CA17D1CD30X",
|
||||
"locationName": "Client"
|
||||
},
|
||||
{
|
||||
"clientAddr": "9XXXXXXXXXX9",
|
||||
"locationName": "Unavailable Client"
|
||||
}
|
||||
],
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK.",
|
||||
"query": "/info/getLocations?callback=jsonp"
|
||||
}
|
||||
}
|
tests/fixtures/directv/info-get-version.json (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"accessCardId": "0021-1495-6572",
|
||||
"receiverId": "0288 7745 5858",
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK",
|
||||
"query": "/info/getVersion"
|
||||
},
|
||||
"stbSoftwareVersion": "0x4ed7",
|
||||
"systemTime": 1281625203,
|
||||
"version": "1.2"
|
||||
}
|
tests/fixtures/directv/info-mode-error.json (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"status": {
|
||||
"code": 500,
|
||||
"commandResult": 1,
|
||||
"msg": "Internal Server Error.",
|
||||
"query": "/info/mode"
|
||||
}
|
||||
}
|
tests/fixtures/directv/info-mode.json (vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"mode": 0,
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK",
|
||||
"query": "/info/mode"
|
||||
}
|
||||
}
|
tests/fixtures/directv/remote-process-key.json (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"hold": "keyPress",
|
||||
"key": "ANY",
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK",
|
||||
"query": "/remote/processKey?key=ANY&hold=keyPress"
|
||||
}
|
||||
}
|
tests/fixtures/directv/tv-get-tuned-movie.json (vendored, new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"callsign": "HALLHD",
|
||||
"date": "2013",
|
||||
"duration": 7200,
|
||||
"isOffAir": false,
|
||||
"isPclocked": 3,
|
||||
"isPpv": false,
|
||||
"isRecording": false,
|
||||
"isVod": false,
|
||||
"major": 312,
|
||||
"minor": 65535,
|
||||
"offset": 4437,
|
||||
"programId": "17016356",
|
||||
"rating": "TV-G",
|
||||
"startTime": 1584795600,
|
||||
"stationId": 6580971,
|
||||
"title": "Snow Bride",
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK.",
|
||||
"query": "/tv/getTuned"
|
||||
}
|
||||
}
|
tests/fixtures/directv/tv-get-tuned.json (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"callsign": "FOODHD",
|
||||
"date": "20070324",
|
||||
"duration": 1791,
|
||||
"episodeTitle": "Spaghetti and Clam Sauce",
|
||||
"expiration": "0",
|
||||
"expiryTime": 0,
|
||||
"isOffAir": false,
|
||||
"isPartial": false,
|
||||
"isPclocked": 1,
|
||||
"isPpv": false,
|
||||
"isRecording": false,
|
||||
"isViewed": true,
|
||||
"isVod": false,
|
||||
"keepUntilFull": true,
|
||||
"major": 231,
|
||||
"minor": 65535,
|
||||
"offset": 263,
|
||||
"programId": "4405732",
|
||||
"rating": "No Rating",
|
||||
"recType": 3,
|
||||
"startTime": 1278342008,
|
||||
"stationId": 3900976,
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK.",
|
||||
"query": "/tv/getTuned"
|
||||
},
|
||||
"title": "Tyler's Ultimate",
|
||||
"uniqueId": "6728716739474078694"
|
||||
}
|
tests/fixtures/directv/tv-tune.json (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"status": {
|
||||
"code": 200,
|
||||
"commandResult": 0,
|
||||
"msg": "OK",
|
||||
"query": "/tv/tune?major=508"
|
||||
}
|
||||
}
|
tests/fixtures/myq/devices.json (vendored, new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
{
|
||||
"count" : 4,
|
||||
"href" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices",
|
||||
"items" : [
|
||||
{
|
||||
"device_type" : "ethernetgateway",
|
||||
"created_date" : "2020-02-10T22:54:58.423",
|
||||
"href" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/gateway_serial",
|
||||
"device_family" : "gateway",
|
||||
"name" : "Happy place",
|
||||
"device_platform" : "myq",
|
||||
"state" : {
|
||||
"homekit_enabled" : false,
|
||||
"pending_bootload_abandoned" : false,
|
||||
"online" : true,
|
||||
"last_status" : "2020-03-30T02:49:46.4121303Z",
|
||||
"physical_devices" : [],
|
||||
"firmware_version" : "1.6",
|
||||
"learn_mode" : false,
|
||||
"learn" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/gateway_serial/learn",
|
||||
"homekit_capable" : false,
|
||||
"updated_date" : "2020-03-30T02:49:46.4171299Z"
|
||||
},
|
||||
"serial_number" : "gateway_serial"
|
||||
},
|
||||
{
|
||||
"serial_number" : "gate_serial",
|
||||
"state" : {
|
||||
"report_ajar" : false,
|
||||
"aux_relay_delay" : "00:00:00",
|
||||
"is_unattended_close_allowed" : true,
|
||||
"door_ajar_interval" : "00:00:00",
|
||||
"aux_relay_behavior" : "None",
|
||||
"last_status" : "2020-03-30T02:47:40.2794038Z",
|
||||
"online" : true,
|
||||
"rex_fires_door" : false,
|
||||
"close" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/gate_serial/close",
|
||||
"invalid_shutout_period" : "00:00:00",
|
||||
"invalid_credential_window" : "00:00:00",
|
||||
"use_aux_relay" : false,
|
||||
"command_channel_report_status" : false,
|
||||
"last_update" : "2020-03-28T23:07:39.5611776Z",
|
||||
"door_state" : "closed",
|
||||
"max_invalid_attempts" : 0,
|
||||
"open" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/gate_serial/open",
|
||||
"passthrough_interval" : "00:00:00",
|
||||
"control_from_browser" : false,
|
||||
"report_forced" : false,
|
||||
"is_unattended_open_allowed" : true
|
||||
},
|
||||
"parent_device_id" : "gateway_serial",
|
||||
"name" : "Gate",
|
||||
"device_platform" : "myq",
|
||||
"device_family" : "garagedoor",
|
||||
"parent_device" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/gateway_serial",
|
||||
"href" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/gate_serial",
|
||||
"device_type" : "gate",
|
||||
"created_date" : "2020-02-10T22:54:58.423"
|
||||
},
|
||||
{
|
||||
"parent_device" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/gateway_serial",
|
||||
"href" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/large_garage_serial",
|
||||
"device_type" : "wifigaragedooropener",
|
||||
"created_date" : "2020-02-10T22:55:25.863",
|
||||
"device_platform" : "myq",
|
||||
"name" : "Large Garage Door",
|
||||
"device_family" : "garagedoor",
|
||||
"serial_number" : "large_garage_serial",
|
||||
"state" : {
|
||||
"report_forced" : false,
|
||||
"is_unattended_open_allowed" : true,
|
||||
"passthrough_interval" : "00:00:00",
|
||||
"control_from_browser" : false,
|
||||
"attached_work_light_error_present" : false,
|
||||
"max_invalid_attempts" : 0,
|
||||
"open" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/large_garage_serial/open",
|
||||
"command_channel_report_status" : false,
|
||||
"last_update" : "2020-03-28T23:58:55.5906643Z",
|
||||
"door_state" : "closed",
|
||||
"invalid_shutout_period" : "00:00:00",
|
||||
"use_aux_relay" : false,
|
||||
"invalid_credential_window" : "00:00:00",
|
||||
"rex_fires_door" : false,
|
||||
"close" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/large_garage_serial/close",
|
||||
"online" : true,
|
||||
"last_status" : "2020-03-30T02:49:46.4121303Z",
|
||||
"aux_relay_behavior" : "None",
|
||||
"door_ajar_interval" : "00:00:00",
|
||||
"gdo_lock_connected" : false,
|
||||
"report_ajar" : false,
|
||||
"aux_relay_delay" : "00:00:00",
|
||||
"is_unattended_close_allowed" : true
|
||||
},
|
||||
"parent_device_id" : "gateway_serial"
|
||||
},
|
||||
{
|
||||
"serial_number" : "small_garage_serial",
|
||||
"state" : {
|
||||
"last_status" : "2020-03-30T02:48:45.7501595Z",
|
||||
"online" : true,
|
||||
"report_ajar" : false,
|
||||
"aux_relay_delay" : "00:00:00",
|
||||
"is_unattended_close_allowed" : true,
|
||||
"gdo_lock_connected" : false,
|
||||
"door_ajar_interval" : "00:00:00",
|
||||
"aux_relay_behavior" : "None",
|
||||
"attached_work_light_error_present" : false,
|
||||
"control_from_browser" : false,
|
||||
"passthrough_interval" : "00:00:00",
|
||||
"is_unattended_open_allowed" : true,
|
||||
"report_forced" : false,
|
||||
"close" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/small_garage_serial/close",
|
||||
"rex_fires_door" : false,
|
||||
"invalid_credential_window" : "00:00:00",
|
||||
"use_aux_relay" : false,
|
||||
"invalid_shutout_period" : "00:00:00",
|
||||
"door_state" : "closed",
|
||||
"last_update" : "2020-03-26T15:45:31.4713796Z",
|
||||
"command_channel_report_status" : false,
|
||||
"open" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/small_garage_serial/open",
|
||||
"max_invalid_attempts" : 0
|
||||
},
|
||||
"parent_device_id" : "gateway_serial",
|
||||
"device_platform" : "myq",
|
||||
"name" : "Small Garage Door",
|
||||
"device_family" : "garagedoor",
|
||||
"parent_device" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/gateway_serial",
|
||||
"href" : "http://api.myqdevice.com/api/v5/accounts/account_id/devices/small_garage_serial",
|
||||
"device_type" : "wifigaragedooropener",
|
||||
"created_date" : "2020-02-10T23:11:47.487"
|
||||
}
|
||||
]
|
||||
}
|
tests/fixtures/powerwall/device_type.json (vendored, new file, 1 line)
@@ -0,0 +1 @@
|
||||
{"device_type":"hec"}
|
tests/fixtures/powerwall/status.json (vendored, new file, 1 line)
@@ -0,0 +1 @@
|
||||
{"start_time":"2020-03-10 11:57:25 +0800","up_time_seconds":"217h40m57.470801079s","is_new":false,"version":"1.45.1","git_hash":"13bf684a633175f884079ec79f42997080d90310"}
|
tests/fixtures/tado/smartac3.with_swing.json (vendored, new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
{
|
||||
"tadoMode": "HOME",
|
||||
"geolocationOverride": false,
|
||||
"geolocationOverrideDisableTime": null,
|
||||
"preparation": null,
|
||||
"setting": {
|
||||
"type": "AIR_CONDITIONING",
|
||||
"power": "ON",
|
||||
"mode": "HEAT",
|
||||
"temperature": {
|
||||
"celsius": 20.00,
|
||||
"fahrenheit": 68.00
|
||||
},
|
||||
"fanSpeed": "AUTO",
|
||||
"swing": "ON"
|
||||
},
|
||||
"overlayType": null,
|
||||
"overlay": null,
|
||||
"openWindow": null,
|
||||
"nextScheduleChange": {
|
||||
"start": "2020-03-28T04:30:00Z",
|
||||
"setting": {
|
||||
"type": "AIR_CONDITIONING",
|
||||
"power": "ON",
|
||||
"mode": "HEAT",
|
||||
"temperature": {
|
||||
"celsius": 23.00,
|
||||
"fahrenheit": 73.40
|
||||
},
|
||||
"fanSpeed": "AUTO",
|
||||
"swing": "ON"
|
||||
}
|
||||
},
|
||||
"nextTimeBlock": {
|
||||
"start": "2020-03-28T04:30:00.000Z"
|
||||
},
|
||||
"link": {
|
||||
"state": "ONLINE"
|
||||
},
|
||||
"activityDataPoints": {
|
||||
"acPower": {
|
||||
"timestamp": "2020-03-27T23:02:22.260Z",
|
||||
"type": "POWER",
|
||||
"value": "ON"
|
||||
}
|
||||
},
|
||||
"sensorDataPoints": {
|
||||
"insideTemperature": {
|
||||
"celsius": 20.88,
|
||||
"fahrenheit": 69.58,
|
||||
"timestamp": "2020-03-28T02:09:27.830Z",
|
||||
"type": "TEMPERATURE",
|
||||
"precision": {
|
||||
"celsius": 0.1,
|
||||
"fahrenheit": 0.1
|
||||
}
|
||||
},
|
||||
"humidity": {
|
||||
"type": "PERCENTAGE",
|
||||
"percentage": 42.30,
|
||||
"timestamp": "2020-03-28T02:09:27.830Z"
|
||||
}
|
||||
}
|
||||
}
|
tests/fixtures/tado/zone_with_swing_capabilities.json (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"type": "AIR_CONDITIONING",
|
||||
"AUTO": {
|
||||
"fanSpeeds": ["AUTO", "HIGH", "MIDDLE", "LOW"],
|
||||
"swings": ["OFF", "ON"]
|
||||
},
|
||||
"COOL": {
|
||||
"temperatures": {
|
||||
"celsius": {
|
||||
"min": 18,
|
||||
"max": 30,
|
||||
"step": 1.0
|
||||
},
|
||||
"fahrenheit": {
|
||||
"min": 64,
|
||||
"max": 86,
|
||||
"step": 1.0
|
||||
}
|
||||
},
|
||||
"fanSpeeds": ["AUTO", "HIGH", "MIDDLE", "LOW"],
|
||||
"swings": ["OFF", "ON"]
|
||||
},
|
||||
"DRY": {
|
||||
"swings": ["OFF", "ON"]
|
||||
},
|
||||
"FAN": {
|
||||
"fanSpeeds": ["AUTO", "HIGH", "MIDDLE", "LOW"],
|
||||
"swings": ["OFF", "ON"]
|
||||
},
|
||||
"HEAT": {
|
||||
"temperatures": {
|
||||
"celsius": {
|
||||
"min": 16,
|
||||
"max": 30,
|
||||
"step": 1.0
|
||||
},
|
||||
"fahrenheit": {
|
||||
"min": 61,
|
||||
"max": 86,
|
||||
"step": 1.0
|
||||
}
|
||||
},
|
||||
"fanSpeeds": ["AUTO", "HIGH", "MIDDLE", "LOW"],
|
||||
"swings": ["OFF", "ON"]
|
||||
}
|
||||
}
|
tests/fixtures/tado/zones.json (vendored, 48 lines changed)
@@ -175,5 +175,53 @@
|
||||
},
|
||||
"id" : 4,
|
||||
"supportsDazzle" : true
|
||||
},
|
||||
{
|
||||
"dazzleMode" : {
|
||||
"supported" : true,
|
||||
"enabled" : true
|
||||
},
|
||||
"name" : "Air Conditioning with swing",
|
||||
"id" : 5,
|
||||
"supportsDazzle" : true,
|
||||
"devices" : [
|
||||
{
|
||||
"deviceType" : "WR02",
|
||||
"shortSerialNo" : "WR4",
|
||||
"serialNo" : "WR4",
|
||||
"commandTableUploadState" : "FINISHED",
|
||||
"duties" : [
|
||||
"ZONE_UI",
|
||||
"ZONE_DRIVER",
|
||||
"ZONE_LEADER"
|
||||
],
|
||||
"currentFwVersion" : "59.4",
|
||||
"characteristics" : {
|
||||
"capabilities" : [
|
||||
"INSIDE_TEMPERATURE_MEASUREMENT",
|
||||
"IDENTIFY"
|
||||
]
|
||||
},
|
||||
"accessPointWiFi" : {
|
||||
"ssid" : "tado8480"
|
||||
},
|
||||
"connectionState" : {
|
||||
"timestamp" : "2020-03-23T18:30:07.377Z",
|
||||
"value" : true
|
||||
}
|
||||
}
|
||||
],
|
||||
"dazzleEnabled" : true,
|
||||
"dateCreated" : "2019-11-28T15:58:48.968Z",
|
||||
"openWindowDetection" : {
|
||||
"timeoutInSeconds" : 900,
|
||||
"enabled" : true,
|
||||
"supported" : true
|
||||
},
|
||||
"deviceTypes" : [
|
||||
"WR02"
|
||||
],
|
||||
"reportAvailable" : false,
|
||||
"type" : "AIR_CONDITIONING"
|
||||
}
|
||||
]
|
||||
|
@@ -6,11 +6,6 @@ IGNORE_UNCAUGHT_EXCEPTIONS = [
|
    ("tests.components.cast.test_media_player", "test_entry_setup_platform_not_ready"),
    ("tests.components.config.test_automation", "test_delete_automation"),
    ("tests.components.config.test_group", "test_update_device_config"),
    ("tests.components.deconz.test_binary_sensor", "test_allow_clip_sensor"),
    ("tests.components.deconz.test_climate", "test_clip_climate_device"),
    ("tests.components.deconz.test_init", "test_unload_entry_multiple_gateways"),
    ("tests.components.deconz.test_light", "test_disable_light_groups"),
    ("tests.components.deconz.test_sensor", "test_allow_clip_sensors"),
    ("tests.components.default_config.test_init", "test_setup"),
    ("tests.components.demo.test_init", "test_setting_up_demo"),
    ("tests.components.discovery.test_init", "test_discover_config_flow"),
@@ -52,9 +47,6 @@ IGNORE_UNCAUGHT_EXCEPTIONS = [
    ("tests.components.dyson.test_fan", "test_purecool_component_setup_only_once"),
    ("tests.components.dyson.test_sensor", "test_purecool_component_setup_only_once"),
    ("test_homeassistant_bridge", "test_homeassistant_bridge_fan_setup"),
    ("tests.components.hue.test_bridge", "test_handle_unauthorized"),
    ("tests.components.hue.test_init", "test_security_vuln_check"),
    ("tests.components.hue.test_light", "test_group_features"),
    ("tests.components.ios.test_init", "test_creating_entry_sets_up_sensor"),
    ("tests.components.ios.test_init", "test_not_configuring_ios_not_creates_entry"),
    ("tests.components.local_file.test_camera", "test_file_not_readable"),