Mirror of https://github.com/home-assistant/core.git, synced 2025-08-07 14:45:09 +02:00
Merge remote-tracking branch 'upstream/dev' into insteon-fanlinc
@@ -175,7 +175,6 @@ omit =
    homeassistant/components/climate/oem.py
    homeassistant/components/climate/proliphix.py
    homeassistant/components/climate/radiotherm.py
    homeassistant/components/config/zwave.py
    homeassistant/components/cover/garadget.py
    homeassistant/components/cover/homematic.py
    homeassistant/components/cover/myq.py
@@ -326,6 +325,7 @@ omit =
    homeassistant/components/sensor/coinmarketcap.py
    homeassistant/components/sensor/comed_hourly_pricing.py
    homeassistant/components/sensor/cpuspeed.py
    homeassistant/components/sensor/crimereports.py
    homeassistant/components/sensor/cups.py
    homeassistant/components/sensor/currencylayer.py
    homeassistant/components/sensor/darksky.py
@@ -441,7 +441,6 @@ omit =
    homeassistant/components/weather/openweathermap.py
    homeassistant/components/weather/zamg.py
    homeassistant/components/zeroconf.py
    homeassistant/components/zwave/__init__.py
    homeassistant/components/zwave/util.py
@@ -20,6 +20,17 @@ from homeassistant.const import (
from homeassistant.util.async import run_callback_threadsafe


def attempt_use_uvloop():
    """Attempt to use uvloop."""
    import asyncio

    try:
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    except ImportError:
        pass


def monkey_patch_asyncio():
    """Replace weakref.WeakSet to address Python 3 bug.
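A short aside on attempt_use_uvloop() above: swapping the event-loop policy only affects loops created afterwards. A minimal standalone sketch of the same idea, assuming the optional uvloop package is installed (nothing below is part of the diff):

import asyncio

try:
    import uvloop
    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
    pass  # uvloop is optional; fall back to the default asyncio loop

# Loops created after the policy swap come from uvloop when it is available.
loop = asyncio.new_event_loop()
print(type(loop))  # e.g. a uvloop.Loop, otherwise the stock asyncio loop
loop.close()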
@@ -311,8 +322,7 @@ def setup_and_run_hass(config_dir: str,
            EVENT_HOMEASSISTANT_START, open_browser
        )

    hass.start()
    return hass.exit_code
    return hass.start()


def try_to_restart() -> None:
@@ -359,11 +369,13 @@ def try_to_restart() -> None:

def main() -> int:
    """Start Home Assistant."""
    validate_python()

    attempt_use_uvloop()

    if sys.version_info[:3] < (3, 5, 3):
        monkey_patch_asyncio()

    validate_python()

    args = get_arguments()

    if args.script is not None:
@@ -74,8 +74,6 @@ def async_from_config_dict(config: Dict[str, Any],
    This method is a coroutine.
    """
    start = time()
    hass.async_track_tasks()

    core_config = config.get(core.DOMAIN, {})

    try:
@@ -140,10 +138,10 @@ def async_from_config_dict(config: Dict[str, Any],
            continue
        hass.async_add_job(async_setup_component(hass, component, config))

    yield from hass.async_stop_track_tasks()
    yield from hass.async_block_till_done()

    stop = time()
    _LOGGER.info('Home Assistant initialized in %ss', round(stop-start, 2))
    _LOGGER.info('Home Assistant initialized in %.2fs', stop-start)

    async_register_signal_handling(hass)
    return hass
@@ -17,7 +17,7 @@ from homeassistant.const import (
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession

REQUIREMENTS = ['pyalarmdotcom==0.2.9']
REQUIREMENTS = ['pyalarmdotcom==0.3.0']

_LOGGER = logging.getLogger(__name__)
@@ -17,9 +17,9 @@ from homeassistant.bootstrap import ERROR_LOG_FILENAME
from homeassistant.const import (
    EVENT_HOMEASSISTANT_STOP, EVENT_TIME_CHANGED,
    HTTP_BAD_REQUEST, HTTP_CREATED, HTTP_NOT_FOUND,
    HTTP_UNPROCESSABLE_ENTITY, MATCH_ALL, URL_API, URL_API_COMPONENTS,
    MATCH_ALL, URL_API, URL_API_COMPONENTS,
    URL_API_CONFIG, URL_API_DISCOVERY_INFO, URL_API_ERROR_LOG,
    URL_API_EVENT_FORWARD, URL_API_EVENTS, URL_API_SERVICES,
    URL_API_EVENTS, URL_API_SERVICES,
    URL_API_STATES, URL_API_STATES_ENTITY, URL_API_STREAM, URL_API_TEMPLATE,
    __version__)
from homeassistant.exceptions import TemplateError
@@ -48,7 +48,6 @@ def setup(hass, config):
    hass.http.register_view(APIEventView)
    hass.http.register_view(APIServicesView)
    hass.http.register_view(APIDomainServicesView)
    hass.http.register_view(APIEventForwardingView)
    hass.http.register_view(APIComponentsView)
    hass.http.register_view(APITemplateView)

@@ -319,79 +318,6 @@ class APIDomainServicesView(HomeAssistantView):
        return self.json(changed_states)


class APIEventForwardingView(HomeAssistantView):
    """View to handle EventForwarding requests."""

    url = URL_API_EVENT_FORWARD
    name = "api:event-forward"
    event_forwarder = None

    @asyncio.coroutine
    def post(self, request):
        """Setup an event forwarder."""
        _LOGGER.warning('Event forwarding is deprecated. '
                        'Will be removed by 0.43')
        hass = request.app['hass']
        try:
            data = yield from request.json()
        except ValueError:
            return self.json_message("No data received.", HTTP_BAD_REQUEST)

        try:
            host = data['host']
            api_password = data['api_password']
        except KeyError:
            return self.json_message("No host or api_password received.",
                                     HTTP_BAD_REQUEST)

        try:
            port = int(data['port']) if 'port' in data else None
        except ValueError:
            return self.json_message("Invalid value received for port.",
                                     HTTP_UNPROCESSABLE_ENTITY)

        api = rem.API(host, api_password, port)

        valid = yield from hass.loop.run_in_executor(
            None, api.validate_api)
        if not valid:
            return self.json_message("Unable to validate API.",
                                     HTTP_UNPROCESSABLE_ENTITY)

        if self.event_forwarder is None:
            self.event_forwarder = rem.EventForwarder(hass)

        self.event_forwarder.async_connect(api)

        return self.json_message("Event forwarding setup.")

    @asyncio.coroutine
    def delete(self, request):
        """Remove event forwarder."""
        try:
            data = yield from request.json()
        except ValueError:
            return self.json_message("No data received.", HTTP_BAD_REQUEST)

        try:
            host = data['host']
        except KeyError:
            return self.json_message("No host received.", HTTP_BAD_REQUEST)

        try:
            port = int(data['port']) if 'port' in data else None
        except ValueError:
            return self.json_message("Invalid value received for port.",
                                     HTTP_UNPROCESSABLE_ENTITY)

        if self.event_forwarder is not None:
            api = rem.API(host, None, port)

            self.event_forwarder.async_disconnect(api)

        return self.json_message("Event forwarding cancelled.")


class APIComponentsView(HomeAssistantView):
    """View to handle Components requests."""
@@ -2,15 +2,15 @@
Offer event listening automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#event-trigger
at https://home-assistant.io/docs/automation/trigger/#event-trigger
"""
import asyncio
import logging

import voluptuous as vol

from homeassistant.core import callback
from homeassistant.const import CONF_PLATFORM
from homeassistant.core import callback, CoreState
from homeassistant.const import CONF_PLATFORM, EVENT_HOMEASSISTANT_START
from homeassistant.helpers import config_validation as cv

CONF_EVENT_TYPE = "event_type"
@@ -31,6 +31,19 @@ def async_trigger(hass, config, action):
    event_type = config.get(CONF_EVENT_TYPE)
    event_data = config.get(CONF_EVENT_DATA)

    if (event_type == EVENT_HOMEASSISTANT_START and
            hass.state == CoreState.starting):
        _LOGGER.warning('Deprecation: Automations should not listen to event '
                        "'homeassistant_start'. Use platform 'homeassistant' "
                        'instead. Feature will be removed in 0.45')
        hass.async_run_job(action, {
            'trigger': {
                'platform': 'event',
                'event': None,
            },
        })
        return lambda: None

    @callback
    def handle_event(event):
        """Listen for events and calls the action when data matches."""
homeassistant/components/automation/homeassistant.py (new file, 55 lines)
@@ -0,0 +1,55 @@
"""
Offer Home Assistant core automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#homeassistant-trigger
"""
import asyncio
import logging

import voluptuous as vol

from homeassistant.core import callback, CoreState
from homeassistant.const import (
    CONF_PLATFORM, CONF_EVENT, EVENT_HOMEASSISTANT_STOP)

EVENT_START = 'start'
EVENT_SHUTDOWN = 'shutdown'
_LOGGER = logging.getLogger(__name__)

TRIGGER_SCHEMA = vol.Schema({
    vol.Required(CONF_PLATFORM): 'homeassistant',
    vol.Required(CONF_EVENT): vol.Any(EVENT_START, EVENT_SHUTDOWN),
})


@asyncio.coroutine
def async_trigger(hass, config, action):
    """Listen for events based on configuration."""
    event = config.get(CONF_EVENT)

    if event == EVENT_SHUTDOWN:
        @callback
        def hass_shutdown(event):
            """Called when Home Assistant is shutting down."""
            hass.async_run_job(action, {
                'trigger': {
                    'platform': 'homeassistant',
                    'event': event,
                },
            })

        return hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP,
                                          hass_shutdown)

    # Automation are enabled while hass is starting up, fire right away
    # Check state because a config reload shouldn't trigger it.
    elif hass.state == CoreState.starting:
        hass.async_run_job(action, {
            'trigger': {
                'platform': 'homeassistant',
                'event': event,
            },
        })

    return lambda: None
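To illustrate the TRIGGER_SCHEMA in the new file above, a minimal standalone sketch of validating trigger configs with voluptuous; the sample dicts are hypothetical and the constant values mirror homeassistant.const:

import voluptuous as vol

CONF_PLATFORM = 'platform'
CONF_EVENT = 'event'
EVENT_START = 'start'
EVENT_SHUTDOWN = 'shutdown'

TRIGGER_SCHEMA = vol.Schema({
    vol.Required(CONF_PLATFORM): 'homeassistant',
    vol.Required(CONF_EVENT): vol.Any(EVENT_START, EVENT_SHUTDOWN),
})

print(TRIGGER_SCHEMA({'platform': 'homeassistant', 'event': 'start'}))  # passes
try:
    TRIGGER_SCHEMA({'platform': 'homeassistant', 'event': 'reboot'})
except vol.Invalid as err:
    print('rejected:', err)  # 'reboot' is not one of the allowed events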
@@ -70,7 +70,7 @@ def async_trigger(hass, config, action):
        nonlocal held_less_than, held_more_than
        pressed_time = dt_util.utcnow()
        if held_more_than is None and held_less_than is None:
            call_action()
            hass.add_job(call_action)
        if held_more_than is not None and held_less_than is None:
            cancel_pressed_more_than = track_point_in_utc_time(
                hass,
@@ -88,7 +88,7 @@ def async_trigger(hass, config, action):
        held_time = dt_util.utcnow() - pressed_time
        if held_less_than is not None and held_time < held_less_than:
            if held_more_than is None or held_time > held_more_than:
                call_action()
                hass.add_job(call_action)

    hass.data['litejet_system'].on_switch_pressed(number, pressed)
    hass.data['litejet_system'].on_switch_released(number, released)
@@ -2,7 +2,7 @@
Offer MQTT listening automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#mqtt-trigger
at https://home-assistant.io/docs/automation/trigger/#mqtt-trigger
"""
import asyncio
import json
@@ -2,7 +2,7 @@
Offer numeric state listening automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#numeric-state-trigger
at https://home-assistant.io/docs/automation/trigger/#numeric-state-trigger
"""
import asyncio
import logging
@@ -2,7 +2,7 @@
Offer state listening automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#state-trigger
at https://home-assistant.io/docs/automation/trigger/#state-trigger
"""
import asyncio
import voluptuous as vol
@@ -2,7 +2,7 @@
Offer sun based automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#sun-trigger
at https://home-assistant.io/docs/automation/trigger/#sun-trigger
"""
import asyncio
from datetime import timedelta
@@ -2,7 +2,7 @@
Offer template automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#template-trigger
at https://home-assistant.io/docs/automation/trigger/#template-trigger
"""
import asyncio
import logging
@@ -2,7 +2,7 @@
Offer time listening automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#time-trigger
at https://home-assistant.io/docs/automation/trigger/#time-trigger
"""
import asyncio
import logging
@@ -2,7 +2,7 @@
Offer zone automation rules.

For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#zone-trigger
at https://home-assistant.io/docs/automation/trigger/#zone-trigger
"""
import asyncio
import voluptuous as vol
@@ -66,8 +66,9 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
    obj_holidays = getattr(holidays, country)(years=year)

    if province:
        if province not in obj_holidays.PROVINCES:
            _LOGGER.error('There is no province/state %s in country %s',
        if province not in obj_holidays.PROVINCES and \
                province not in obj_holidays.STATES:
            _LOGGER.error("There is no province/state %s in country %s",
                          province, country)
            return False
        else:
@@ -18,7 +18,7 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import (
    async_get_clientsession, async_aiohttp_proxy_web)

REQUIREMENTS = ['amcrest==1.1.5']
REQUIREMENTS = ['amcrest==1.1.8']

_LOGGER = logging.getLogger(__name__)
@@ -66,9 +66,13 @@ class FoscamCamera(Camera):
    def camera_image(self):
        """Return a still image reponse from the camera."""
        # Send the request to snap a picture and return raw jpg data
        response = requests.get(self._snap_picture_url, timeout=10)

        return response.content
        # Handle exception if host is not reachable or url failed
        try:
            response = requests.get(self._snap_picture_url, timeout=10)
        except requests.exceptions.ConnectionError:
            return None
        else:
            return response.content

    @property
    def name(self):
@@ -81,7 +81,9 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
        params=query_payload
    )

    query_resp = yield from query_req.json()
    # Skip content type check because Synology doesn't return JSON with
    # right content type
    query_resp = yield from query_req.json(content_type=None)
    auth_path = query_resp['data'][AUTH_API]['path']
    camera_api = query_resp['data'][CAMERA_API]['path']
    camera_path = query_resp['data'][CAMERA_API]['path']
@@ -127,7 +129,7 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
        _LOGGER.exception("Error on %s", syno_camera_url)
        return False

    camera_resp = yield from camera_req.json()
    camera_resp = yield from camera_req.json(content_type=None)
    cameras = camera_resp['data']['cameras']

    # add cameras
@@ -172,7 +174,7 @@ def get_session_id(hass, websession, username, password, login_url, timeout):
            login_url,
            params=auth_payload
        )
        auth_resp = yield from auth_req.json()
        auth_resp = yield from auth_req.json(content_type=None)
        return auth_resp['data']['sid']

    except (asyncio.TimeoutError, aiohttp.ClientError):
@@ -692,18 +692,16 @@ class ClimateDevice(Entity):

    def _convert_for_display(self, temp):
        """Convert temperature into preferred units for display purposes."""
        if (temp is None or not isinstance(temp, Number) or
                self.temperature_unit == self.unit_of_measurement):
        if temp is None or not isinstance(temp, Number):
            return temp

        value = convert_temperature(temp, self.temperature_unit,
                                    self.unit_of_measurement)

        if self.temperature_unit != self.unit_of_measurement:
            temp = convert_temperature(temp, self.temperature_unit,
                                       self.unit_of_measurement)
        # Round in the units appropriate
        if self.precision == PRECISION_HALVES:
            return round(value * 2) / 2.0
            return round(temp * 2) / 2.0
        elif self.precision == PRECISION_TENTHS:
            return round(value, 1)
            return round(temp, 1)
        else:
            # PRECISION_WHOLE as a fall back
            return round(value)
            return round(temp)
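For intuition on the precision handling in _convert_for_display above, a small standalone arithmetic check (plain Python, no Home Assistant objects; the sample temperature is made up):

temp = 22.26  # hypothetical converted temperature
print(round(temp * 2) / 2.0)  # PRECISION_HALVES -> 22.5
print(round(temp, 1))         # PRECISION_TENTHS -> 22.3
print(round(temp))            # PRECISION_WHOLE  -> 22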
@@ -16,11 +16,11 @@ _LOGGER = logging.getLogger(__name__)

def setup_scanner(hass, config, see, discovery_info=None):
    """Setup the MySensors tracker."""
    def mysensors_callback(gateway, node_id):
    def mysensors_callback(gateway, msg):
        """Callback for mysensors platform."""
        node = gateway.sensors[node_id]
        node = gateway.sensors[msg.node_id]
        if node.sketch_name is None:
            _LOGGER.info('No sketch_name: node %s', node_id)
            _LOGGER.info('No sketch_name: node %s', msg.node_id)
            return

        pres = gateway.const.Presentation
@@ -37,12 +37,12 @@ def setup_scanner(hass, config, see, discovery_info=None):
                              'latitude,longitude,altitude', position)
                continue
            name = '{} {} {}'.format(
                node.sketch_name, node_id, child.id)
                node.sketch_name, msg.node_id, child.id)
            attr = {
                mysensors.ATTR_CHILD_ID: child.id,
                mysensors.ATTR_DESCRIPTION: child.description,
                mysensors.ATTR_DEVICE: gateway.device,
                mysensors.ATTR_NODE_ID: node_id,
                mysensors.ATTR_NODE_ID: msg.node_id,
            }
            see(
                dev_id=slugify(name),
@@ -13,6 +13,7 @@ import logging

import voluptuous as vol

from homeassistant.core import callback
from homeassistant.const import EVENT_HOMEASSISTANT_START
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_point_in_utc_time
@@ -105,7 +106,7 @@ def async_setup(hass, config):
            hass, component, platform, info, config)

    @asyncio.coroutine
    def scan_devices(_):
    def scan_devices(now):
        """Scan for devices."""
        results = yield from hass.loop.run_in_executor(
            None, _discover, netdisco)
@@ -116,7 +117,12 @@ def async_setup(hass, config):
        async_track_point_in_utc_time(hass, scan_devices,
                                      dt_util.utcnow() + SCAN_INTERVAL)

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, scan_devices)
    @callback
    def schedule_first(event):
        """Schedule the first discovery when Home Assistant starts up."""
        async_track_point_in_utc_time(hass, scan_devices, dt_util.utcnow())

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, schedule_first)

    return True
homeassistant/components/hassio.py (new file, 272 lines)
@@ -0,0 +1,272 @@
"""
Exposes regular rest commands as services.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/hassio/
"""
import asyncio
import logging
import os

import aiohttp
from aiohttp import web
from aiohttp.web_exceptions import HTTPBadGateway
import async_timeout
import voluptuous as vol

from homeassistant.config import load_yaml_config_file
from homeassistant.components.http import HomeAssistantView
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv

DOMAIN = 'hassio'
DEPENDENCIES = ['http']

_LOGGER = logging.getLogger(__name__)

LONG_TASK_TIMEOUT = 900
DEFAULT_TIMEOUT = 10

SERVICE_HOST_SHUTDOWN = 'host_shutdown'
SERVICE_HOST_REBOOT = 'host_reboot'

SERVICE_HOST_UPDATE = 'host_update'
SERVICE_SUPERVISOR_UPDATE = 'supervisor_update'
SERVICE_HOMEASSISTANT_UPDATE = 'homeassistant_update'

SERVICE_ADDON_INSTALL = 'addon_install'
SERVICE_ADDON_UNINSTALL = 'addon_uninstall'
SERVICE_ADDON_UPDATE = 'addon_update'
SERVICE_ADDON_START = 'addon_start'
SERVICE_ADDON_STOP = 'addon_stop'

ATTR_ADDON = 'addon'
ATTR_VERSION = 'version'


SCHEMA_SERVICE_UPDATE = vol.Schema({
    vol.Optional(ATTR_VERSION): cv.string,
})

SCHEMA_SERVICE_ADDONS = vol.Schema({
    vol.Required(ATTR_ADDON): cv.slug,
})

SCHEMA_SERVICE_ADDONS_VERSION = SCHEMA_SERVICE_ADDONS.extend({
    vol.Optional(ATTR_VERSION): cv.string,
})


SERVICE_MAP = {
    SERVICE_HOST_SHUTDOWN: None,
    SERVICE_HOST_REBOOT: None,
    SERVICE_HOST_UPDATE: SCHEMA_SERVICE_UPDATE,
    SERVICE_SUPERVISOR_UPDATE: SCHEMA_SERVICE_UPDATE,
    SERVICE_HOMEASSISTANT_UPDATE: SCHEMA_SERVICE_UPDATE,
    SERVICE_ADDON_INSTALL: SCHEMA_SERVICE_ADDONS_VERSION,
    SERVICE_ADDON_UNINSTALL: SCHEMA_SERVICE_ADDONS,
    SERVICE_ADDON_START: SCHEMA_SERVICE_ADDONS,
    SERVICE_ADDON_STOP: SCHEMA_SERVICE_ADDONS,
    SERVICE_ADDON_UPDATE: SCHEMA_SERVICE_ADDONS_VERSION,
}


@asyncio.coroutine
def async_setup(hass, config):
    """Setup the hassio component."""
    try:
        host = os.environ['HASSIO']
    except KeyError:
        _LOGGER.error("No HassIO supervisor detect!")
        return False

    websession = async_get_clientsession(hass)
    hassio = HassIO(hass.loop, websession, host)

    api_ok = yield from hassio.is_connected()
    if not api_ok:
        _LOGGER.error("Not connected with HassIO!")
        return False

    # register base api views
    for base in ('host', 'homeassistant'):
        hass.http.register_view(HassIOBaseView(hassio, base))
    for base in ('supervisor', 'network'):
        hass.http.register_view(HassIOBaseEditView(hassio, base))

    # register view for addons
    hass.http.register_view(HassIOAddonsView(hassio))

    @asyncio.coroutine
    def async_service_handler(service):
        """Handle HassIO service calls."""
        addon = service.data.get(ATTR_ADDON)
        if ATTR_VERSION in service.data:
            version = {ATTR_VERSION: service.data[ATTR_VERSION]}
        else:
            version = None

        # map to api call
        if service.service == SERVICE_HOST_UPDATE:
            yield from hassio.send_command(
                "/host/update", payload=version)
        elif service.service == SERVICE_HOST_REBOOT:
            yield from hassio.send_command("/host/reboot")
        elif service.service == SERVICE_HOST_SHUTDOWN:
            yield from hassio.send_command("/host/shutdown")
        elif service.service == SERVICE_SUPERVISOR_UPDATE:
            yield from hassio.send_command(
                "/supervisor/update", payload=version)
        elif service.service == SERVICE_HOMEASSISTANT_UPDATE:
            yield from hassio.send_command(
                "/homeassistant/update", payload=version,
                timeout=LONG_TASK_TIMEOUT)
        elif service.service == SERVICE_ADDON_INSTALL:
            yield from hassio.send_command(
                "/addons/{}/install".format(addon), payload=version,
                timeout=LONG_TASK_TIMEOUT)
        elif service.service == SERVICE_ADDON_UNINSTALL:
            yield from hassio.send_command(
                "/addons/{}/uninstall".format(addon))
        elif service.service == SERVICE_ADDON_START:
            yield from hassio.send_command("/addons/{}/start".format(addon))
        elif service.service == SERVICE_ADDON_STOP:
            yield from hassio.send_command("/addons/{}/stop".format(addon))
        elif service.service == SERVICE_ADDON_UPDATE:
            yield from hassio.send_command(
                "/addons/{}/update".format(addon), payload=version,
                timeout=LONG_TASK_TIMEOUT)

    descriptions = yield from hass.loop.run_in_executor(
        None, load_yaml_config_file, os.path.join(
            os.path.dirname(__file__), 'services.yaml'))

    for service, schema in SERVICE_MAP.items():
        hass.services.async_register(
            DOMAIN, service, async_service_handler,
            descriptions[DOMAIN][service], schema=schema)

    return True


class HassIO(object):
    """Small API wrapper for HassIO."""

    def __init__(self, loop, websession, ip):
        """Initialze HassIO api."""
        self.loop = loop
        self.websession = websession
        self._ip = ip

    def is_connected(self):
        """Return True if it connected to HassIO supervisor.

        Return a coroutine.
        """
        return self.send_command("/supervisor/ping")

    @asyncio.coroutine
    def send_command(self, cmd, payload=None, timeout=DEFAULT_TIMEOUT):
        """Send request to API."""
        answer = yield from self.send_raw(cmd, payload=payload)
        if answer['result'] == 'ok':
            return answer['data'] if answer['data'] else True

        _LOGGER.error("%s return error %s.", cmd, answer['message'])
        return False

    @asyncio.coroutine
    def send_raw(self, cmd, payload=None, timeout=DEFAULT_TIMEOUT):
        """Send raw request to API."""
        try:
            with async_timeout.timeout(timeout, loop=self.loop):
                request = yield from self.websession.get(
                    "http://{}{}".format(self._ip, cmd),
                    timeout=None, json=payload
                )

                if request.status != 200:
                    _LOGGER.error("%s return code %d.", cmd, request.status)
                    return

                return (yield from request.json())

        except asyncio.TimeoutError:
            _LOGGER.error("Timeout on api request %s.", cmd)

        except aiohttp.ClientError:
            _LOGGER.error("Client error on api request %s.", cmd)


class HassIOBaseView(HomeAssistantView):
    """HassIO view to handle base part."""

    requires_auth = True

    def __init__(self, hassio, base):
        """Initialize a hassio base view."""
        self.hassio = hassio
        self._url_info = "/{}/info".format(base)

        self.url = "/api/hassio/{}".format(base)
        self.name = "api:hassio:{}".format(base)

    @asyncio.coroutine
    def get(self, request):
        """Get base data."""
        data = yield from self.hassio.send_command(self._url_info)
        if not data:
            raise HTTPBadGateway()
        return web.json_response(data)


class HassIOBaseEditView(HassIOBaseView):
    """HassIO view to handle base with options support."""

    def __init__(self, hassio, base):
        """Initialize a hassio base edit view."""
        super().__init__(hassio, base)
        self._url_options = "/{}/options".format(base)

    @asyncio.coroutine
    def post(self, request):
        """Set options on host."""
        data = yield from request.json()

        response = yield from self.hassio.send_raw(
            self._url_options, payload=data)
        if not response:
            raise HTTPBadGateway()
        return web.json_response(response)


class HassIOAddonsView(HomeAssistantView):
    """HassIO view to handle addons part."""

    requires_auth = True
    url = "/api/hassio/addons/{addon}"
    name = "api:hassio:addons"

    def __init__(self, hassio):
        """Initialize a hassio addon view."""
        self.hassio = hassio

    @asyncio.coroutine
    def get(self, request, addon):
        """Get addon data."""
        data = yield from self.hassio.send_command(
            "/addons/{}/info".format(addon))
        if not data:
            raise HTTPBadGateway()
        return web.json_response(data)

    @asyncio.coroutine
    def post(self, request, addon):
        """Set options on host."""
        data = yield from request.json()

        response = yield from self.hassio.send_raw(
            "/addons/{}/options".format(addon), payload=data)
        if not response:
            raise HTTPBadGateway()
        return web.json_response(response)
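As a usage note for the services registered in hassio.py above (domain and service names come from SERVICE_MAP in the new file), a hedged sketch of invoking them from inside a running Home Assistant instance; the add-on slug and version string are made-up placeholders:

# Sketch only: assumes a running `hass` instance with the hassio component set up.
hass.services.call('hassio', 'addon_start', {'addon': 'example_addon'})
hass.services.call('hassio', 'homeassistant_update', {'version': '0.42.0'})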
@@ -48,7 +48,7 @@ class LiteJetLight(Light):
    def _on_load_changed(self):
        """Called on a LiteJet thread when a load's state changes."""
        _LOGGER.debug("Updating due to notification for %s", self._name)
        self._hass.async_add_job(self.async_update_ha_state(True))
        self.schedule_update_ha_state(True)

    @property
    def supported_features(self):
@@ -255,7 +255,8 @@ class YeelightLight(Light):
    def set_flash(self, flash) -> None:
        """Activate flash."""
        if flash:
            from yeelight import RGBTransition, SleepTransition, Flow
            from yeelight import (RGBTransition, SleepTransition, Flow,
                                  BulbException)
            if self._bulb.last_properties["color_mode"] != 1:
                _LOGGER.error("Flash supported currently only in RGB mode.")
                return
@@ -280,10 +281,14 @@ class YeelightLight(Light):
                duration=duration))

            flow = Flow(count=count, transitions=transitions)
            self._bulb.start_flow(flow)
            try:
                self._bulb.start_flow(flow)
            except BulbException as ex:
                _LOGGER.error("Unable to set flash: %s", ex)

    def turn_on(self, **kwargs) -> None:
        """Turn the bulb on."""
        import yeelight
        brightness = kwargs.get(ATTR_BRIGHTNESS)
        colortemp = kwargs.get(ATTR_COLOR_TEMP)
        rgb = kwargs.get(ATTR_RGB_COLOR)
@@ -293,22 +298,43 @@ class YeelightLight(Light):
        if ATTR_TRANSITION in kwargs:  # passed kwarg overrides config
            duration = int(kwargs.get(ATTR_TRANSITION) * 1000)  # kwarg in s

        self._bulb.turn_on(duration=duration)
        try:
            self._bulb.turn_on(duration=duration)
        except yeelight.BulbException as ex:
            _LOGGER.error("Unable to turn the bulb on: %s", ex)
            return

        if self.config[CONF_MODE_MUSIC] and not self._bulb.music_mode:
            self.set_music_mode(self.config[CONF_MODE_MUSIC])
            try:
                self.set_music_mode(self.config[CONF_MODE_MUSIC])
            except yeelight.BulbException as ex:
                _LOGGER.error("Unable to turn on music mode,"
                              "consider disabling it: %s", ex)

        # values checked for none in methods
        self.set_rgb(rgb, duration)
        self.set_colortemp(colortemp, duration)
        self.set_brightness(brightness, duration)
        self.set_flash(flash)
        try:
            # values checked for none in methods
            self.set_rgb(rgb, duration)
            self.set_colortemp(colortemp, duration)
            self.set_brightness(brightness, duration)
            self.set_flash(flash)
        except yeelight.BulbException as ex:
            _LOGGER.error("Unable to set bulb properties: %s", ex)
            return

        # save the current state if we had a manual change.
        if self.config[CONF_SAVE_ON_CHANGE]:
            if brightness or colortemp or rgb:
        if self.config[CONF_SAVE_ON_CHANGE] and (brightness
                                                 or colortemp
                                                 or rgb):
            try:
                self.set_default()
            except yeelight.BulbException as ex:
                _LOGGER.error("Unable to set the defaults: %s", ex)
                return

    def turn_off(self, **kwargs) -> None:
        """Turn off."""
        self._bulb.turn_off()
        import yeelight
        try:
            self._bulb.turn_off()
        except yeelight.BulbException as ex:
            _LOGGER.error("Unable to turn the bulb off: %s", ex)
@@ -21,7 +21,7 @@ from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util

REQUIREMENTS = ['pyemby==1.1']
REQUIREMENTS = ['pyemby==1.2']

_LOGGER = logging.getLogger(__name__)
@@ -9,9 +9,9 @@ import logging
import voluptuous as vol

from homeassistant.components.media_player import (
    MEDIA_TYPE_MUSIC, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
    SUPPORT_PAUSE, SUPPORT_SEEK, SUPPORT_STOP, SUPPORT_PLAY_MEDIA,
    SUPPORT_PLAY, SUPPORT_NEXT_TRACK, PLATFORM_SCHEMA, MediaPlayerDevice)
    MEDIA_TYPE_MUSIC, SUPPORT_VOLUME_SET, SUPPORT_PAUSE,
    SUPPORT_PLAY_MEDIA, SUPPORT_PLAY, SUPPORT_NEXT_TRACK,
    PLATFORM_SCHEMA, MediaPlayerDevice)
from homeassistant.const import (
    STATE_IDLE, CONF_NAME, EVENT_HOMEASSISTANT_STOP)
import homeassistant.helpers.config_validation as cv
@@ -20,14 +20,13 @@ import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)


REQUIREMENTS = ['gstreamer-player==1.0.0']
REQUIREMENTS = ['gstreamer-player==1.1.0']
DOMAIN = 'gstreamer'
CONF_PIPELINE = 'pipeline'


SUPPORT_GSTREAMER = SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
    SUPPORT_PLAY | SUPPORT_PAUSE | SUPPORT_SEEK | SUPPORT_STOP | \
    SUPPORT_PLAY_MEDIA | SUPPORT_SEEK | SUPPORT_NEXT_TRACK
SUPPORT_GSTREAMER = SUPPORT_VOLUME_SET | SUPPORT_PLAY | SUPPORT_PAUSE |\
    SUPPORT_PLAY_MEDIA | SUPPORT_NEXT_TRACK

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_NAME): cv.string,
@@ -61,7 +60,6 @@ class GstreamerDevice(MediaPlayerDevice):
        self._state = STATE_IDLE
        self._volume = None
        self._duration = None
        self._position = None
        self._uri = None
        self._title = None
        self._artist = None
@@ -72,16 +70,11 @@ class GstreamerDevice(MediaPlayerDevice):
        self._state = self._player.state
        self._volume = self._player.volume
        self._duration = self._player.duration
        self._position = self._player.position
        self._uri = self._player.uri
        self._title = self._player.title
        self._album = self._player.album
        self._artist = self._player.artist

    def mute_volume(self, mute):
        """Send the mute command."""
        self._player.mute()

    def set_volume_level(self, volume):
        """Set the volume level."""
        self._player.volume = volume
@@ -93,9 +86,13 @@ class GstreamerDevice(MediaPlayerDevice):
            return
        self._player.queue(media_id)

    def media_seek(self, position):
        """Seek."""
        self._player.position = position
    def media_play(self):
        """Play."""
        self._player.play()

    def media_pause(self):
        """Pause."""
        self._player.pause()

    def media_next_track(self):
        """Next track."""
@@ -121,11 +118,6 @@ class GstreamerDevice(MediaPlayerDevice):
        """Return the volume level."""
        return self._volume

    @property
    def is_volume_muted(self):
        """Volume muted."""
        return self._volume == 0

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
@@ -141,11 +133,6 @@ class GstreamerDevice(MediaPlayerDevice):
        """Duration of current playing media in seconds."""
        return self._duration

    @property
    def media_position(self):
        """Position of current playing media in seconds."""
        return self._position

    @property
    def media_title(self):
        """Media title."""
@@ -27,7 +27,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.deprecation import get_deprecated

REQUIREMENTS = ['jsonrpc-async==0.4', 'jsonrpc-websocket==0.3']
REQUIREMENTS = ['jsonrpc-async==0.6', 'jsonrpc-websocket==0.5']

_LOGGER = logging.getLogger(__name__)
@@ -4,11 +4,14 @@ Support to interface with Sonos players (via SoCo).
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.sonos/
"""
import asyncio
import datetime
import functools as ft
import logging
from os import path
import socket
import urllib

import voluptuous as vol

from homeassistant.components.media_player import (
@@ -107,7 +110,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
            return

        if player.is_visible:
            device = SonosDevice(hass, player)
            device = SonosDevice(player)
            add_devices([device], True)
            hass.data[DATA_SONOS].append(device)
            if len(hass.data[DATA_SONOS]) > 1:
@@ -132,7 +135,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
        _LOGGER.warning('No Sonos speakers found.')
        return

    hass.data[DATA_SONOS] = [SonosDevice(hass, p) for p in players]
    hass.data[DATA_SONOS] = [SonosDevice(p) for p in players]
    add_devices(hass.data[DATA_SONOS], True)
    _LOGGER.info('Added %s Sonos speakers', len(players))

@@ -216,19 +219,42 @@ class _ProcessSonosEventQueue():
def _get_entity_from_soco(hass, soco):
    """Return SonosDevice from SoCo."""
    for device in hass.data[DATA_SONOS]:
        if soco == device.soco_device:
        if soco == device.soco:
            return device
    raise ValueError("No entity for SoCo device!")


def soco_error(funct):
    """Decorator to catch soco exceptions."""
    @ft.wraps(funct)
    def wrapper(*args, **kwargs):
        """Wrapper for all soco exception."""
        from soco.exceptions import SoCoException
        try:
            return funct(*args, **kwargs)
        except SoCoException as err:
            _LOGGER.error("Error on %s with %s.", funct.__name__, err)

    return wrapper


def soco_coordinator(funct):
    """Decorator to call funct on coordinator."""
    @ft.wraps(funct)
    def wrapper(device, *args, **kwargs):
        """Wrapper for call to coordinator."""
        if device.is_coordinator:
            return funct(device, *args, **kwargs)
        return funct(device.coordinator, *args, **kwargs)

    return wrapper

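The soco_error and soco_coordinator decorators above follow the standard functools.wraps wrapper pattern. A self-contained sketch of the same error-swallowing idea, using a plain ValueError instead of a SoCo exception (nothing below touches SoCo or Home Assistant):

import functools as ft


def catch_errors(funct):
    """Log exceptions raised by funct instead of letting them propagate."""
    @ft.wraps(funct)
    def wrapper(*args, **kwargs):
        try:
            return funct(*args, **kwargs)
        except ValueError as err:
            print("Error on {} with {}.".format(funct.__name__, err))
    return wrapper


@catch_errors
def set_volume(value):
    """Pretend player command that rejects out-of-range volumes."""
    if not 0 <= value <= 100:
        raise ValueError("volume out of range")
    return value


set_volume(150)  # the error is logged instead of raised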
class SonosDevice(MediaPlayerDevice):
    """Representation of a Sonos device."""

    def __init__(self, hass, player):
    def __init__(self, player):
        """Initialize the Sonos device."""
        from soco.snapshot import Snapshot

        self.hass = hass
        self.volume_increment = 5
        self._unique_id = player.uid
        self._player = player
@@ -260,9 +286,14 @@ class SonosDevice(MediaPlayerDevice):
        self._is_playing_tv = None
        self._favorite_sources = None
        self._source_name = None
        self.soco_snapshot = Snapshot(self._player)
        self._soco_snapshot = None
        self._snapshot_group = None

    @asyncio.coroutine
    def async_added_to_hass(self):
        """Subscribe sonos events."""
        self.hass.loop.run_in_executor(None, self._subscribe_to_player_events)

    @property
    def should_poll(self):
        """Polling needed."""
@@ -297,7 +328,7 @@ class SonosDevice(MediaPlayerDevice):
        return self._coordinator is None

    @property
    def soco_device(self):
    def soco(self):
        """Return soco device."""
        return self._player

@@ -327,7 +358,6 @@ class SonosDevice(MediaPlayerDevice):
            auto_renew=True,
            event_queue=self._queue)

    # pylint: disable=too-many-branches, too-many-statements
    def update(self):
        """Retrieve latest state."""
        if self._speaker_info is None:
@@ -606,16 +636,6 @@ class SonosDevice(MediaPlayerDevice):
        self._is_playing_tv = is_playing_tv
        self._is_playing_line_in = is_playing_line_in
        self._source_name = source_name

        # update state of the whole group
        for device in [x for x in self.hass.data[DATA_SONOS]
                       if x.coordinator == self]:
            if device.entity_id is not self.entity_id:
                self.schedule_update_ha_state()

        if self._queue is None and self.entity_id is not None:
            self._subscribe_to_player_events()

        self._last_avtransport_event = None

    def _format_media_image_url(self, url, fallback_uri):
@@ -781,27 +801,31 @@ class SonosDevice(MediaPlayerDevice):

        return supported

    @soco_error
    def volume_up(self):
        """Volume up media player."""
        self._player.volume += self.volume_increment

    @soco_error
    def volume_down(self):
        """Volume down media player."""
        self._player.volume -= self.volume_increment

    @soco_error
    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._player.volume = str(int(volume * 100))

    @soco_error
    def mute_volume(self, mute):
        """Mute (true) or unmute (false) media player."""
        self._player.mute = mute

    @soco_error
    @soco_coordinator
    def select_source(self, source):
        """Select input source."""
        if self._coordinator:
            self._coordinator.select_source(source)
        elif source == SUPPORT_SOURCE_LINEIN:
        if source == SUPPORT_SOURCE_LINEIN:
            self._source_name = SUPPORT_SOURCE_LINEIN
            self._player.switch_to_line_in()
        elif source == SUPPORT_SOURCE_TV:
@@ -842,83 +866,78 @@ class SonosDevice(MediaPlayerDevice):
        else:
            return self._source_name

    @soco_error
    def turn_off(self):
        """Turn off media player."""
        self.media_pause()

    @soco_error
    @soco_coordinator
    def media_play(self):
        """Send play command."""
        if self._coordinator:
            self._coordinator.media_play()
        else:
            self._player.play()
        self._player.play()

    @soco_error
    @soco_coordinator
    def media_stop(self):
        """Send stop command."""
        if self._coordinator:
            self._coordinator.media_stop()
        else:
            self._player.stop()
        self._player.stop()

    @soco_error
    @soco_coordinator
    def media_pause(self):
        """Send pause command."""
        if self._coordinator:
            self._coordinator.media_pause()
        else:
            self._player.pause()
        self._player.pause()

    @soco_error
    @soco_coordinator
    def media_next_track(self):
        """Send next track command."""
        if self._coordinator:
            self._coordinator.media_next_track()
        else:
            self._player.next()
        self._player.next()

    @soco_error
    @soco_coordinator
    def media_previous_track(self):
        """Send next track command."""
        if self._coordinator:
            self._coordinator.media_previous_track()
        else:
            self._player.previous()
        self._player.previous()

    @soco_error
    @soco_coordinator
    def media_seek(self, position):
        """Send seek command."""
        if self._coordinator:
            self._coordinator.media_seek(position)
        else:
            self._player.seek(str(datetime.timedelta(seconds=int(position))))
        self._player.seek(str(datetime.timedelta(seconds=int(position))))

    @soco_error
    @soco_coordinator
    def clear_playlist(self):
        """Clear players playlist."""
        if self._coordinator:
            self._coordinator.clear_playlist()
        else:
            self._player.clear_queue()
        self._player.clear_queue()

    @soco_error
    def turn_on(self):
        """Turn the media player on."""
        self.media_play()

    @soco_error
    @soco_coordinator
    def play_media(self, media_type, media_id, **kwargs):
        """
        Send the play_media command to the media player.

        If ATTR_MEDIA_ENQUEUE is True, add `media_id` to the queue.
        """
        if self._coordinator:
            self._coordinator.play_media(media_type, media_id, **kwargs)
        if kwargs.get(ATTR_MEDIA_ENQUEUE):
            from soco.exceptions import SoCoUPnPException
            try:
                self._player.add_uri_to_queue(media_id)
            except SoCoUPnPException:
                _LOGGER.error('Error parsing media uri "%s", '
                              "please check it's a valid media resource "
                              'supported by Sonos', media_id)
        else:
            if kwargs.get(ATTR_MEDIA_ENQUEUE):
                from soco.exceptions import SoCoUPnPException
                try:
                    self._player.add_uri_to_queue(media_id)
                except SoCoUPnPException:
                    _LOGGER.error('Error parsing media uri "%s", '
                                  "please check it's a valid media resource "
                                  'supported by Sonos', media_id)
            else:
                self._player.play_uri(media_id)
            self._player.play_uri(media_id)

    @soco_error
    def join(self, master):
        """Join the player to a group."""
        coord = [device for device in self.hass.data[DATA_SONOS]
@@ -926,29 +945,26 @@ class SonosDevice(MediaPlayerDevice):

        if coord and master != self.entity_id:
            coord = coord[0]
            if coord.soco_device.group.coordinator != coord.soco_device:
                coord.soco_device.unjoin()
            self._player.join(coord.soco_device)
            if coord.soco.group.coordinator != coord.soco:
                coord.soco.unjoin()
            self._player.join(coord.soco)
            self._coordinator = coord
        else:
            _LOGGER.error("Master not found %s", master)

    @soco_error
    def unjoin(self):
        """Unjoin the player from a group."""
        self._player.unjoin()
        self._coordinator = None

    @soco_error
    def snapshot(self, with_group=True):
        """Snapshot the player."""
        from soco.exceptions import SoCoException
        try:
            self.soco_snapshot.is_playing_queue = False
            self.soco_snapshot.is_coordinator = False
            self.soco_snapshot.snapshot()
        except SoCoException:
            _LOGGER.debug("Error on snapshot %s", self.entity_id)
            self._snapshot_group = None
            return
        from soco.snapshot import Snapshot

        self._soco_snapshot = Snapshot(self._player)
        self._soco_snapshot.snapshot()

        if with_group:
            self._snapshot_group = self._player.group
@@ -957,14 +973,15 @@ class SonosDevice(MediaPlayerDevice):
        else:
            self._snapshot_group = None

    @soco_error
    def restore(self, with_group=True):
        """Restore snapshot for the player."""
        from soco.exceptions import SoCoException
        try:
            # need catch exception if a coordinator is going to slave.
            # this state will recover with group part.
            self.soco_snapshot.restore(False)
        except (TypeError, SoCoException):
            self._soco_snapshot.restore(False)
        except (TypeError, AttributeError, SoCoException):
            _LOGGER.debug("Error on restore %s", self.entity_id)

        # restore groups
@@ -1006,19 +1023,17 @@ class SonosDevice(MediaPlayerDevice):
            if s_dev != old.coordinator:
                s_dev.join(old.coordinator)

    @soco_error
    @soco_coordinator
    def set_sleep_timer(self, sleep_time):
        """Set the timer on the player."""
        if self._coordinator:
            self._coordinator.set_sleep_timer(sleep_time)
        else:
            self._player.set_sleep_timer(sleep_time)
        self._player.set_sleep_timer(sleep_time)

    @soco_error
    @soco_coordinator
    def clear_sleep_timer(self):
        """Clear the timer on the player."""
        if self._coordinator:
            self._coordinator.set_sleep_timer(None)
        else:
            self._player.set_sleep_timer(None)
        self._player.set_sleep_timer(None)

    @property
    def device_state_attributes(self):
@@ -46,7 +46,7 @@ MYSENSORS_GATEWAYS = 'mysensors_gateways'
MQTT_COMPONENT = 'mqtt'
REQUIREMENTS = [
    'https://github.com/theolind/pymysensors/archive/'
    '0b705119389be58332f17753c53167f551254b6c.zip#pymysensors==0.8']
    'ff3476b70edc9c995b939cddb9d51f8d2d018581.zip#pymysensors==0.9.0']


def is_socket_address(value):
@@ -104,8 +104,22 @@ def is_serial_port(value):
    return cv.isdevice(value)


def deprecated(key):
    """Mark key as deprecated in config."""
    def validator(config):
        """Check if key is in config, log warning and remove key."""
        if key not in config:
            return config
        _LOGGER.warning(
            '%s option for %s is deprecated. Please remove %s from your '
            'configuration file.', key, DOMAIN, key)
        config.pop(key)
        return config
    return validator


CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
    DOMAIN: vol.Schema(vol.All(deprecated(CONF_DEBUG), {
        vol.Required(CONF_GATEWAYS): vol.All(
            cv.ensure_list, has_all_unique_files,
            [{
@@ -125,12 +139,11 @@ CONFIG_SCHEMA = vol.Schema({
                CONF_TOPIC_OUT_PREFIX, default=''): valid_publish_topic,
            }]
        ),
        vol.Optional(CONF_DEBUG, default=False): cv.boolean,
        vol.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
        vol.Optional(CONF_PERSISTENCE, default=True): cv.boolean,
        vol.Optional(CONF_RETAIN, default=True): cv.boolean,
        vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.Coerce(float),
    })
    }))
}, extra=vol.ALLOW_EXTRA)
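A quick sketch of what the deprecated() validator introduced above does when wrapped around a config dict; the sample dict is hypothetical, and CONF_DEBUG is the string 'debug' in this component:

conf = {'debug': True, 'gateways': []}
conf = deprecated('debug')(conf)  # logs the deprecation warning
print(conf)                       # {'gateways': []}, the deprecated key is removed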
@@ -182,7 +195,6 @@ def setup(hass, config):
            # invalid ip address
            return
        gateway.metric = hass.config.units.is_metric
        gateway.debug = config[DOMAIN].get(CONF_DEBUG)
        optimistic = config[DOMAIN].get(CONF_OPTIMISTIC)
        gateway = GatewayWrapper(gateway, optimistic, device)
        # pylint: disable=attribute-defined-outside-init
@@ -192,7 +204,14 @@ def setup(hass, config):
            """Callback to trigger start of gateway and any persistence."""
            if persistence:
                for node_id in gateway.sensors:
                    gateway.event_callback('persistence', node_id)
                    node = gateway.sensors[node_id]
                    for child_id in node.children:
                        child = node.children[child_id]
                        for value_type in child.values:
                            msg = mysensors.Message().modify(
                                node_id=node_id, child_id=child_id, type=1,
                                sub_type=value_type)
                            gateway.event_callback(msg)
            gateway.start()
            hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP,
                                 lambda event: gateway.stop())
@@ -249,40 +268,38 @@ def setup(hass, config):

def pf_callback_factory(map_sv_types, devices, entity_class, add_devices=None):
    """Return a new callback for the platform."""
    def mysensors_callback(gateway, node_id):
    def mysensors_callback(gateway, msg):
        """Callback for mysensors platform."""
        if gateway.sensors[node_id].sketch_name is None:
            _LOGGER.info('No sketch_name: node %s', node_id)
        if gateway.sensors[msg.node_id].sketch_name is None:
            _LOGGER.debug('No sketch_name: node %s', msg.node_id)
            return

        new_devices = []
        for child in gateway.sensors[node_id].children.values():
            for value_type in child.values.keys():
                key = node_id, child.id, value_type
                if child.type not in map_sv_types or \
                        value_type not in map_sv_types[child.type]:
                    continue
                if key in devices:
                    if add_devices:
                        devices[key].schedule_update_ha_state(True)
                    else:
                        devices[key].update()
                    continue
                name = '{} {} {}'.format(
                    gateway.sensors[node_id].sketch_name, node_id, child.id)
                if isinstance(entity_class, dict):
                    device_class = entity_class[child.type]
                else:
                    device_class = entity_class
                devices[key] = device_class(
                    gateway, node_id, child.id, name, value_type, child.type)
                if add_devices:
                    new_devices.append(devices[key])
                else:
                    devices[key].update()
        if add_devices and new_devices:
            _LOGGER.info('Adding new devices: %s', new_devices)
            add_devices(new_devices, True)
        child = gateway.sensors[msg.node_id].children.get(msg.child_id)
        if child is None or child.values.get(msg.sub_type) is None:
            return
        key = msg.node_id, child.id, msg.sub_type
        if child.type not in map_sv_types or \
                msg.sub_type not in map_sv_types[child.type]:
            return
        if key in devices:
            if add_devices:
                devices[key].schedule_update_ha_state(True)
            else:
                devices[key].update()
            return
        name = '{} {} {}'.format(
            gateway.sensors[msg.node_id].sketch_name, msg.node_id,
            child.id)
        if isinstance(entity_class, dict):
            device_class = entity_class[child.type]
        else:
            device_class = entity_class
        devices[key] = device_class(
            gateway, msg.node_id, child.id, name, msg.sub_type)
        if add_devices:
            _LOGGER.info('Adding new devices: %s', [devices[key]])
            add_devices([devices[key]], True)
        else:
            devices[key].update()
    return mysensors_callback


@@ -330,11 +347,13 @@ class GatewayWrapper(object):

    def callback_factory(self):
        """Return a new callback function."""
        def node_update(update_type, node_id):
        def node_update(msg):
            """Callback for node updates from the MySensors gateway."""
            _LOGGER.debug('Update %s: node %s', update_type, node_id)
            _LOGGER.debug(
                'Update: node %s, child %s sub_type %s',
                msg.node_id, msg.child_id, msg.sub_type)
            for callback in self.platform_callbacks:
                callback(self, node_id)
                callback(self, msg)

        return node_update

@@ -342,36 +361,15 @@ class GatewayWrapper(object):
class MySensorsDeviceEntity(object):
    """Represent a MySensors entity."""

    def __init__(
            self, gateway, node_id, child_id, name, value_type, child_type):
        """
        Setup class attributes on instantiation.

        Args:
        gateway (GatewayWrapper): Gateway object.
        node_id (str): Id of node.
        child_id (str): Id of child.
        name (str): Entity name.
        value_type (str): Value type of child. Value is entity state.
        child_type (str): Child type of child.

        Attributes:
        gateway (GatewayWrapper): Gateway object.
        node_id (str): Id of node.
        child_id (str): Id of child.
        _name (str): Entity name.
        value_type (str): Value type of child. Value is entity state.
        child_type (str): Child type of child.
        battery_level (int): Node battery level.
        _values (dict): Child values. Non state values set as state attributes.
        mysensors (module): Mysensors main component module.
        """
    def __init__(self, gateway, node_id, child_id, name, value_type):
        """Set up MySensors device."""
        self.gateway = gateway
        self.node_id = node_id
        self.child_id = child_id
        self._name = name
        self.value_type = value_type
        self.child_type = child_type
        child = gateway.sensors[node_id].children[child_id]
        self.child_type = child.type
        self._values = {}

    @property
@@ -19,7 +19,7 @@ from homeassistant.components.notify import (
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['jsonrpc-async==0.4']
REQUIREMENTS = ['jsonrpc-async==0.6']

_LOGGER = logging.getLogger(__name__)
@@ -35,7 +35,7 @@ from .util import session_scope
|
||||
|
||||
DOMAIN = 'recorder'
|
||||
|
||||
REQUIREMENTS = ['sqlalchemy==1.1.8']
|
||||
REQUIREMENTS = ['sqlalchemy==1.1.9']
|
||||
|
||||
DEFAULT_URL = 'sqlite:///{hass_config_path}'
|
||||
DEFAULT_DB_FILE = 'home-assistant_v2.db'
|
||||
|
@@ -19,7 +19,7 @@ import homeassistant.loader as loader
|
||||
|
||||
from requests.exceptions import HTTPError, ConnectTimeout
|
||||
|
||||
REQUIREMENTS = ['amcrest==1.1.5']
|
||||
REQUIREMENTS = ['amcrest==1.1.8']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -146,4 +146,4 @@ class AmcrestSensor(Entity):
|
||||
sd_total = self._camera.storage_total
|
||||
self._attrs['Total'] = '{0} {1}'.format(*sd_total)
|
||||
self._attrs['Used'] = '{0} {1}'.format(*sd_used)
|
||||
self._state = self._camera.percent(sd_used[0], sd_total[0])
|
||||
self._state = self._camera.storage_used_percent
|
||||
|
@@ -19,7 +19,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_BRAND = 'Brand'
|
||||
ATTR_HZ = 'GHz Advertised'
|
||||
ATTR_VENDOR = 'Vendor ID'
|
||||
ATTR_ARCH = 'arch'
|
||||
|
||||
DEFAULT_NAME = 'CPU speed'
|
||||
ICON = 'mdi:pulse'
|
||||
@@ -67,7 +67,7 @@ class CpuSpeedSensor(Entity):
|
||||
"""Return the state attributes."""
|
||||
if self.info is not None:
|
||||
return {
|
||||
ATTR_VENDOR: self.info['vendor_id'],
|
||||
ATTR_ARCH: self.info['arch'],
|
||||
ATTR_BRAND: self.info['brand'],
|
||||
ATTR_HZ: round(self.info['hz_advertised_raw'][0]/10**9, 2)
|
||||
}
|
||||
|
homeassistant/components/sensor/crimereports.py (new file, 123 lines)
@@ -0,0 +1,123 @@
"""
Sensor for Crime Reports.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.crimereports/
"""
from collections import defaultdict
from datetime import timedelta
import logging

import voluptuous as vol

from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
    CONF_INCLUDE, CONF_EXCLUDE, CONF_NAME, CONF_LATITUDE, CONF_LONGITUDE,
    ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE,
    LENGTH_KILOMETERS, LENGTH_METERS)
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from homeassistant.util.distance import convert
from homeassistant.util.dt import now
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['crimereports==1.0.0']

_LOGGER = logging.getLogger(__name__)

SCAN_INTERVAL = timedelta(minutes=30)
DOMAIN = 'crimereports'
EVENT_INCIDENT = '{}_incident'.format(DOMAIN)
CONF_RADIUS = 'radius'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_NAME): cv.string,
    vol.Required(CONF_RADIUS): vol.Coerce(float),
    vol.Inclusive(CONF_LATITUDE, 'coordinates'): cv.latitude,
    vol.Inclusive(CONF_LONGITUDE, 'coordinates'): cv.longitude,
    vol.Optional(CONF_INCLUDE): vol.All(cv.ensure_list, [cv.string]),
    vol.Optional(CONF_EXCLUDE): vol.All(cv.ensure_list, [cv.string])
})


# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Crime Reports platform."""
    latitude = config.get(CONF_LATITUDE, hass.config.latitude)
    longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
    add_devices([CrimeReportsSensor(hass, config.get(CONF_NAME),
                                    latitude, longitude,
                                    config.get(CONF_RADIUS),
                                    config.get(CONF_INCLUDE),
                                    config.get(CONF_EXCLUDE))], True)


class CrimeReportsSensor(Entity):
    """Crime Reports Sensor."""

    def __init__(self, hass, name, latitude, longitude, radius,
                 include, exclude):
        """Initialize the sensor."""
        import crimereports
        self._hass = hass
        self._name = name
        self._include = include
        self._exclude = exclude
        radius_kilometers = convert(radius, LENGTH_METERS, LENGTH_KILOMETERS)
        self._crimereports = crimereports.CrimeReports((latitude, longitude),
                                                       radius_kilometers)
        self._attributes = None
        self._state = None
        self._previous_incidents = set()

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return self._attributes

    def _incident_event(self, incident):
        data = {
            'type': incident.get('type'),
            'description': incident.get('friendly_description'),
            'timestamp': incident.get('timestamp'),
            'location': incident.get('location')
        }
        if incident.get('coordinates'):
            data.update({
                ATTR_LATITUDE: incident.get('coordinates')[0],
                ATTR_LONGITUDE: incident.get('coordinates')[1]
            })
        self._hass.bus.fire(EVENT_INCIDENT, data)

    def update(self):
        """Update device state."""
        import crimereports
        incident_counts = defaultdict(int)
        incidents = self._crimereports.get_incidents(now().date(),
                                                     include=self._include,
                                                     exclude=self._exclude)
        fire_events = len(self._previous_incidents) > 0
        if len(incidents) < len(self._previous_incidents):
            self._previous_incidents = set()
        for incident in incidents:
            incident_type = slugify(incident.get('type'))
            incident_counts[incident_type] += 1
            if (fire_events and incident.get('id')
                    not in self._previous_incidents):
                self._incident_event(incident)
            self._previous_incidents.add(incident.get('id'))
        self._attributes = {
            ATTR_ATTRIBUTION: crimereports.ATTRIBUTION
        }
        self._attributes.update(incident_counts)
        self._state = len(incidents)
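As a quick illustration of the schema defined above, a minimal configuration.yaml entry for this platform could look like the sketch below. The option names come from PLATFORM_SCHEMA; the values, and the incident type names under include and exclude, are made up for the example. Radius is given in meters (it is converted to kilometers before being handed to the crimereports library), and latitude/longitude fall back to the Home Assistant defaults when omitted.

sensor:
  - platform: crimereports
    name: Crime Reports
    radius: 8000
    include:
      - Theft
    exclude:
      - Traffic
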
@@ -16,12 +16,12 @@ import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_USERNAME, CONF_PASSWORD,
CONF_NAME, CONF_MONITORED_VARIABLES)
CONF_NAME, CONF_MONITORED_VARIABLES, TEMP_CELSIUS)
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['pyhydroquebec==1.0.0']
REQUIREMENTS = ['pyhydroquebec==1.1.0']

_LOGGER = logging.getLogger(__name__)

@@ -36,28 +36,32 @@ REQUESTS_TIMEOUT = 15
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=1)

SENSOR_TYPES = {
'period_total_bill': ['Current period bill',
PRICE, 'mdi:square-inc-cash'],
'period_length': ['Current period length',
DAYS, 'mdi:calendar-today'],
'period_total_days': ['Total number of days in this period',
DAYS, 'mdi:calendar-today'],
'period_mean_daily_bill': ['Period daily average bill',
PRICE, 'mdi:square-inc-cash'],
'period_mean_daily_consumption': ['Period daily average consumption',
KILOWATT_HOUR, 'mdi:flash'],
'period_total_consumption': ['Total Consumption',
KILOWATT_HOUR, 'mdi:flash'],
'period_lower_price_consumption': ['Period Lower price consumption',
KILOWATT_HOUR, 'mdi:flash'],
'period_higher_price_consumption': ['Period Higher price consumption',
KILOWATT_HOUR, 'mdi:flash'],
'yesterday_total_consumption': ['Yesterday total consumption',
KILOWATT_HOUR, 'mdi:flash'],
'yesterday_lower_price_consumption': ['Yesterday lower price consumption',
KILOWATT_HOUR, 'mdi:flash'],
'period_total_bill':
['Current period bill', PRICE, 'mdi:square-inc-cash'],
'period_length':
['Current period length', DAYS, 'mdi:calendar-today'],
'period_total_days':
['Total number of days in this period', DAYS, 'mdi:calendar-today'],
'period_mean_daily_bill':
['Period daily average bill', PRICE, 'mdi:square-inc-cash'],
'period_mean_daily_consumption':
['Period daily average consumption', KILOWATT_HOUR, 'mdi:flash'],
'period_total_consumption':
['Total Consumption', KILOWATT_HOUR, 'mdi:flash'],
'period_lower_price_consumption':
['Period Lower price consumption', KILOWATT_HOUR, 'mdi:flash'],
'period_higher_price_consumption':
['Period Higher price consumption', KILOWATT_HOUR, 'mdi:flash'],
'yesterday_total_consumption':
['Yesterday total consumption', KILOWATT_HOUR, 'mdi:flash'],
'yesterday_lower_price_consumption':
['Yesterday lower price consumption', KILOWATT_HOUR, 'mdi:flash'],
'yesterday_higher_price_consumption':
['Yesterday higher price consumption', KILOWATT_HOUR, 'mdi:flash'],
'yesterday_average_temperature':
['Yesterday average temperature', TEMP_CELSIUS, 'mdi:thermometer'],
'period_average_temperature':
['Period average temperature', TEMP_CELSIUS, 'mdi:thermometer'],
}

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
@@ -98,6 +98,7 @@ class MQTTRoomSensor(Entity):

self.hass.async_add_job(self.async_update_ha_state())

@callback
def message_received(topic, payload, qos):
"""A new MQTT message has been received."""
try:
@@ -316,3 +316,72 @@ ffmpeg:
logger:
  set_level:
    description: Set log level for components.

hassio:
  host_reboot:
    description: Reboot host computer.

  host_shutdown:
    description: Power off host computer.

  host_update:
    description: Update host computer.
    fields:
      version:
        description: Optional. If omitted, the latest version is used.
        example: '0.3'

  supervisor_update:
    description: Update HassIO supervisor.
    fields:
      version:
        description: Optional. If omitted, the latest version is used.
        example: '0.3'

  homeassistant_update:
    description: Update HomeAssistant docker image.
    fields:
      version:
        description: Optional. If omitted, the latest version is used.
        example: '0.40.1'

  addon_install:
    description: Install a HassIO docker addon.
    fields:
      addon:
        description: Name of addon.
        example: 'smb_config'
      version:
        description: Optional. If omitted, the latest version is used.
        example: '0.2'

  addon_uninstall:
    description: Uninstall a HassIO docker addon.
    fields:
      addon:
        description: Name of addon.
        example: 'smb_config'

  addon_update:
    description: Update a HassIO docker addon.
    fields:
      addon:
        description: Name of addon.
        example: 'smb_config'
      version:
        description: Optional. If omitted, the latest version is used.
        example: '0.2'

  addon_start:
    description: Start a HassIO docker addon.
    fields:
      addon:
        description: Name of addon.
        example: 'smb_config'

  addon_stop:
    description: Stop a HassIO docker addon.
    fields:
      addon:
        description: Name of addon.
        example: 'smb_config'
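For orientation, a hedged sketch of how one of the add-on services documented above might be invoked, for example as an automation action or from the Services developer tool; the addon slug and version simply reuse the example values from the description fields.

action:
  - service: hassio.addon_update
    data:
      addon: smb_config
      version: '0.2'
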
@@ -30,6 +30,10 @@ STATE_BELOW_HORIZON = 'below_horizon'

STATE_ATTR_AZIMUTH = 'azimuth'
STATE_ATTR_ELEVATION = 'elevation'
STATE_ATTR_NEXT_DAWN = 'next_dawn'
STATE_ATTR_NEXT_DUSK = 'next_dusk'
STATE_ATTR_NEXT_MIDNIGHT = 'next_midnight'
STATE_ATTR_NEXT_NOON = 'next_noon'
STATE_ATTR_NEXT_RISING = 'next_rising'
STATE_ATTR_NEXT_SETTING = 'next_setting'

@@ -47,6 +51,118 @@ def is_on(hass, entity_id=None):
return hass.states.is_state(entity_id, STATE_ABOVE_HORIZON)


def next_dawn(hass, entity_id=None):
"""Local datetime object of the next dawn.

Async friendly.
"""
utc_next = next_dawn_utc(hass, entity_id)

return dt_util.as_local(utc_next) if utc_next else None


def next_dawn_utc(hass, entity_id=None):
"""UTC datetime object of the next dawn.

Async friendly.
"""
entity_id = entity_id or ENTITY_ID

state = hass.states.get(ENTITY_ID)

try:
return dt_util.parse_datetime(
state.attributes[STATE_ATTR_NEXT_DAWN])
except (AttributeError, KeyError):
# AttributeError if state is None
# KeyError if STATE_ATTR_NEXT_DAWN does not exist
return None


def next_dusk(hass, entity_id=None):
"""Local datetime object of the next dusk.

Async friendly.
"""
utc_next = next_dusk_utc(hass, entity_id)

return dt_util.as_local(utc_next) if utc_next else None


def next_dusk_utc(hass, entity_id=None):
"""UTC datetime object of the next dusk.

Async friendly.
"""
entity_id = entity_id or ENTITY_ID

state = hass.states.get(ENTITY_ID)

try:
return dt_util.parse_datetime(
state.attributes[STATE_ATTR_NEXT_DUSK])
except (AttributeError, KeyError):
# AttributeError if state is None
# KeyError if STATE_ATTR_NEXT_DUSK does not exist
return None


def next_midnight(hass, entity_id=None):
"""Local datetime object of the next midnight.

Async friendly.
"""
utc_next = next_midnight_utc(hass, entity_id)

return dt_util.as_local(utc_next) if utc_next else None


def next_midnight_utc(hass, entity_id=None):
"""UTC datetime object of the next midnight.

Async friendly.
"""
entity_id = entity_id or ENTITY_ID

state = hass.states.get(ENTITY_ID)

try:
return dt_util.parse_datetime(
state.attributes[STATE_ATTR_NEXT_MIDNIGHT])
except (AttributeError, KeyError):
# AttributeError if state is None
# KeyError if STATE_ATTR_NEXT_MIDNIGHT does not exist
return None


def next_noon(hass, entity_id=None):
"""Local datetime object of the next solar noon.

Async friendly.
"""
utc_next = next_noon_utc(hass, entity_id)

return dt_util.as_local(utc_next) if utc_next else None


def next_noon_utc(hass, entity_id=None):
"""UTC datetime object of the next noon.

Async friendly.
"""
entity_id = entity_id or ENTITY_ID

state = hass.states.get(ENTITY_ID)

try:
return dt_util.parse_datetime(
state.attributes[STATE_ATTR_NEXT_NOON])
except (AttributeError, KeyError):
# AttributeError if state is None
# KeyError if STATE_ATTR_NEXT_NOON does not exist
return None


def next_setting(hass, entity_id=None):
"""Local datetime object of the next sun setting.

@@ -153,6 +269,8 @@ class Sun(Entity):
self.hass = hass
self.location = location
self._state = self.next_rising = self.next_setting = None
self.next_dawn = self.next_dusk = None
self.next_midnight = self.next_noon = None
self.solar_elevation = self.solar_azimuth = 0

track_utc_time_change(hass, self.timer_update, second=30)
@@ -174,6 +292,10 @@ class Sun(Entity):
def state_attributes(self):
"""Return the state attributes of the sun."""
return {
STATE_ATTR_NEXT_DAWN: self.next_dawn.isoformat(),
STATE_ATTR_NEXT_DUSK: self.next_dusk.isoformat(),
STATE_ATTR_NEXT_MIDNIGHT: self.next_midnight.isoformat(),
STATE_ATTR_NEXT_NOON: self.next_noon.isoformat(),
STATE_ATTR_NEXT_RISING: self.next_rising.isoformat(),
STATE_ATTR_NEXT_SETTING: self.next_setting.isoformat(),
STATE_ATTR_ELEVATION: round(self.solar_elevation, 2),
@@ -183,36 +305,41 @@ class Sun(Entity):
@property
def next_change(self):
"""Datetime when the next change to the state is."""
return min(self.next_rising, self.next_setting)
return min(self.next_dawn, self.next_dusk, self.next_midnight,
self.next_noon, self.next_rising, self.next_setting)

def update_as_of(self, utc_point_in_time):
@staticmethod
def get_next_solar_event(callable_on_astral_location,
utc_point_in_time, mod, increment):
"""Calculate sun state at a point in UTC time."""
import astral

mod = -1
while True:
try:
next_rising_dt = self.location.sunrise(
next_dt = callable_on_astral_location(
utc_point_in_time + timedelta(days=mod), local=False)
if next_rising_dt > utc_point_in_time:
if next_dt > utc_point_in_time:
break
except astral.AstralError:
pass
mod += 1
mod += increment

mod = -1
while True:
try:
next_setting_dt = (self.location.sunset(
utc_point_in_time + timedelta(days=mod), local=False))
if next_setting_dt > utc_point_in_time:
break
except astral.AstralError:
pass
mod += 1
return next_dt

self.next_rising = next_rising_dt
self.next_setting = next_setting_dt
def update_as_of(self, utc_point_in_time):
"""Update the attributes containing solar events."""
self.next_dawn = Sun.get_next_solar_event(
self.location.dawn, utc_point_in_time, -1, 1)
self.next_dusk = Sun.get_next_solar_event(
self.location.dusk, utc_point_in_time, -1, 1)
self.next_midnight = Sun.get_next_solar_event(
self.location.solar_midnight, utc_point_in_time, -1, 1)
self.next_noon = Sun.get_next_solar_event(
self.location.solar_noon, utc_point_in_time, -1, 1)
self.next_rising = Sun.get_next_solar_event(
self.location.sunrise, utc_point_in_time, -1, 1)
self.next_setting = Sun.get_next_solar_event(
self.location.sunset, utc_point_in_time, -1, 1)

def update_sun_position(self, utc_point_in_time):
"""Calculate the position of the sun."""
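Since the new next_dawn, next_dusk, next_midnight and next_noon values are exposed as state attributes of the sun entity, they can be read from templates. A small, hypothetical template sensor illustrating this (the sensor name is invented; sun.sun is the standard entity id of this component):

sensor:
  - platform: template
    sensors:
      next_dawn:
        value_template: '{{ states.sun.sun.attributes.next_dawn }}'
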
@@ -47,12 +47,12 @@ class LiteJetSwitch(SwitchDevice):
def _on_switch_pressed(self):
_LOGGER.debug("Updating pressed for %s", self._name)
self._state = True
self._hass.async_add_job(self.async_update_ha_state())
self.schedule_update_ha_state()

def _on_switch_released(self):
_LOGGER.debug("Updating released for %s", self._name)
self._state = False
self._hass.async_add_job(self.async_update_ha_state())
self.schedule_update_ha_state()

@property
def name(self):

@@ -14,7 +14,7 @@ from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA)
from homeassistant.const import (CONF_HOST, CONF_NAME)
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['pyHS100==0.2.4.1']
REQUIREMENTS = ['pyHS100==0.2.4.2']

_LOGGER = logging.getLogger(__name__)

@@ -14,7 +14,7 @@ from homeassistant.helpers import config_validation as cv

from homeassistant.const import EVENT_HOMEASSISTANT_STOP

REQUIREMENTS = ['pywemo==0.4.16']
REQUIREMENTS = ['pywemo==0.4.17']

DOMAIN = 'wemo'

@@ -13,6 +13,7 @@ from pprint import pprint

import voluptuous as vol

from homeassistant.core import CoreState
from homeassistant.loader import get_platform
from homeassistant.helpers import discovery
from homeassistant.helpers.entity_component import EntityComponent
@@ -124,7 +125,7 @@ CONFIG_SCHEMA = vol.Schema({
vol.Optional(CONF_DEVICE_CONFIG, default={}):
vol.Schema({cv.entity_id: DEVICE_CONFIG_SCHEMA_ENTRY}),
vol.Optional(CONF_DEVICE_CONFIG_GLOB, default={}):
vol.Schema({cv.string: DEVICE_CONFIG_SCHEMA_ENTRY}),
cv.ordered_dict(DEVICE_CONFIG_SCHEMA_ENTRY, cv.string),
vol.Optional(CONF_DEVICE_CONFIG_DOMAIN, default={}):
vol.Schema({cv.string: DEVICE_CONFIG_SCHEMA_ENTRY}),
vol.Optional(CONF_DEBUG, default=DEFAULT_DEBUG): cv.boolean,
@@ -185,8 +186,8 @@ def get_config_value(node, value_index, tries=5):
"""Return the current configuration value for a specific index."""
try:
for value in node.values.values():
# 112 == config command class
if value.command_class == 112 and value.index == value_index:
if (value.command_class == const.COMMAND_CLASS_CONFIGURATION
and value.index == value_index):
return value.data
except RuntimeError:
# If we get an runtime error the dict has changed while
@@ -201,14 +202,15 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Generic Z-Wave platform setup."""
if discovery_info is None or NETWORK is None:
return False

device = hass.data[DATA_ZWAVE_DICT].pop(
discovery_info[const.DISCOVERY_DEVICE])
if device:
async_add_devices([device])
return True
else:
discovery_info[const.DISCOVERY_DEVICE], None)
if device is None:
return False

async_add_devices([device])
return True


# pylint: disable=R0914
def setup(hass, config):
@@ -258,7 +260,7 @@ def setup(hass, config):
NETWORK = ZWaveNetwork(options, autostart=False)
hass.data[DATA_ZWAVE_DICT] = {}

if use_debug:
if use_debug: # pragma: no cover
def log_all(signal, value=None):
"""Log all the signals."""
print("")
@@ -384,11 +386,11 @@ def setup(hass, config):
_LOGGER.info("Zwave test_network have been initialized.")
NETWORK.test()

def stop_zwave(_service_or_event):
def stop_network(_service_or_event):
"""Stop Z-Wave network."""
_LOGGER.info("Stopping ZWave network.")
NETWORK.stop()
if hass.state == 'RUNNING':
if hass.state == CoreState.running:
hass.bus.fire(const.EVENT_NETWORK_STOP)

def rename_node(service):
@@ -532,7 +534,7 @@ def setup(hass, config):
poll_interval = NETWORK.get_poll_interval()
_LOGGER.info("zwave polling interval set to %d ms", poll_interval)

hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_zwave)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_network)

# Register node services for Z-Wave network
hass.services.register(DOMAIN, const.SERVICE_ADD_NODE, add_node,
@@ -553,7 +555,8 @@ def setup(hass, config):
hass.services.register(DOMAIN, const.SERVICE_TEST_NETWORK,
test_network,
descriptions[const.SERVICE_TEST_NETWORK])
hass.services.register(DOMAIN, const.SERVICE_STOP_NETWORK, stop_zwave,
hass.services.register(DOMAIN, const.SERVICE_STOP_NETWORK,
stop_network,
descriptions[const.SERVICE_STOP_NETWORK])
hass.services.register(DOMAIN, const.SERVICE_START_NETWORK,
start_zwave,
@@ -732,7 +735,7 @@ class ZWaveDeviceEntityValues():
device = platform.get_device(
node=self._node, values=self,
node_config=node_config, hass=self._hass)
if not device:
if device is None:
# No entity will be created for this value
self._workaround_ignore = True
return
@@ -840,4 +843,5 @@ class ZWaveDeviceEntity(ZWaveBaseEntity):
def refresh_from_network(self):
"""Refresh all dependent values from zwave network."""
for value in self.values:
self.node.refresh_value(value.value_id)
if value is not None:
self.node.refresh_value(value.value_id)
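The device_config mapping above is keyed by entity id and is now validated with cv.ordered_dict, presumably so that the order of entries is preserved. A hedged sketch of what such a block can look like (the entity id and the polling_intensity option are purely illustrative; DEVICE_CONFIG_SCHEMA_ENTRY defines the options actually accepted):

zwave:
  device_config:
    light.kitchen:
      polling_intensity: 1
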
@@ -108,7 +108,7 @@ CUSTOMIZE_CONFIG_SCHEMA = vol.Schema({
vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}):
vol.Schema({cv.string: dict}),
vol.Optional(CONF_CUSTOMIZE_GLOB, default={}):
vol.Schema({cv.string: dict}),
cv.ordered_dict(OrderedDict, cv.string),
})

CORE_CONFIG_SCHEMA = CUSTOMIZE_CONFIG_SCHEMA.extend({

@@ -361,7 +361,6 @@ URL_API_EVENTS = '/api/events'
URL_API_EVENTS_EVENT = '/api/events/{}'
URL_API_SERVICES = '/api/services'
URL_API_SERVICES_SERVICE = '/api/services/{}/{}'
URL_API_EVENT_FORWARD = '/api/event_forwarding'
URL_API_COMPONENTS = '/api/components'
URL_API_ERROR_LOG = '/api/error_log'
URL_API_LOG_OUT = '/api/log_out'
@@ -18,6 +18,7 @@ from time import monotonic
from types import MappingProxyType
from typing import Optional, Any, Callable, List # NOQA

from async_timeout import timeout
import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -29,20 +30,15 @@ from homeassistant.const import (
EVENT_TIME_CHANGED, MATCH_ALL, EVENT_HOMEASSISTANT_CLOSE,
EVENT_SERVICE_REMOVED, __version__)
from homeassistant.exceptions import (
HomeAssistantError, InvalidEntityFormatError, ShuttingDown)
HomeAssistantError, InvalidEntityFormatError)
from homeassistant.util.async import (
run_coroutine_threadsafe, run_callback_threadsafe)
run_coroutine_threadsafe, run_callback_threadsafe,
fire_coroutine_threadsafe)
import homeassistant.util as util
import homeassistant.util.dt as dt_util
import homeassistant.util.location as location
from homeassistant.util.unit_system import UnitSystem, METRIC_SYSTEM # NOQA

try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass

DOMAIN = 'homeassistant'

# How long we wait for the result of a service call
@@ -54,6 +50,8 @@ ENTITY_ID_PATTERN = re.compile(r"^(\w+)\.(\w+)$")
# Size of a executor pool
EXECUTOR_POOL_SIZE = 10

# How long to wait till things that run on startup have to finish.
TIMEOUT_EVENT_START = 15

_LOGGER = logging.getLogger(__name__)

@@ -86,10 +84,6 @@ def async_loop_exception_handler(loop, context):
kwargs = {}
exception = context.get('exception')
if exception:
# Do not report on shutting down exceptions.
if isinstance(exception, ShuttingDown):
return

kwargs['exc_info'] = (type(exception), exception,
exception.__traceback__)

@@ -123,7 +117,7 @@ class HomeAssistant(object):
self.loop.set_default_executor(self.executor)
self.loop.set_exception_handler(async_loop_exception_handler)
self._pending_tasks = []
self._track_task = False
self._track_task = True
self.bus = EventBus(self)
self.services = ServiceRegistry(self)
self.states = StateMachine(self.bus, self.loop)
@@ -141,15 +135,17 @@ class HomeAssistant(object):
def start(self) -> None:
"""Start home assistant."""
# Register the async start
self.add_job(self.async_start())
fire_coroutine_threadsafe(self.async_start(), self.loop)

# Run forever and catch keyboard interrupt
try:
# Block until stopped
_LOGGER.info("Starting Home Assistant core loop")
self.loop.run_forever()
return self.exit_code
except KeyboardInterrupt:
self.loop.create_task(self.async_stop())
self.loop.call_soon_threadsafe(
self.loop.create_task, self.async_stop())
self.loop.run_forever()
finally:
self.loop.close()
@@ -165,9 +161,21 @@ class HomeAssistant(object):

# pylint: disable=protected-access
self.loop._thread_ident = threading.get_ident()
_async_create_timer(self)
self.bus.async_fire(EVENT_HOMEASSISTANT_START)

try:
with timeout(TIMEOUT_EVENT_START, loop=self.loop):
yield from self.async_stop_track_tasks()
except asyncio.TimeoutError:
_LOGGER.warning(
'Something is blocking Home Assistant from wrapping up the '
'start up phase. We\'re going to continue anyway. Please '
'report the following info at http://bit.ly/2ogP58T : %s',
', '.join(self.config.components))
self._track_task = False

self.state = CoreState.running
_async_create_timer(self)

def add_job(self, target: Callable[..., None], *args: Any) -> None:
"""Add job to the executor pool.
@@ -238,6 +246,8 @@ class HomeAssistant(object):
@asyncio.coroutine
def async_block_till_done(self):
"""Block till all pending work is done."""
assert self._track_task, 'Not tracking tasks'

# To flush out any call_soon_threadsafe
yield from asyncio.sleep(0, loop=self.loop)

@@ -252,7 +262,7 @@ class HomeAssistant(object):

def stop(self) -> None:
"""Stop Home Assistant and shuts down all threads."""
run_coroutine_threadsafe(self.async_stop(), self.loop)
fire_coroutine_threadsafe(self.async_stop(), self.loop)

@asyncio.coroutine
def async_stop(self, exit_code=0) -> None:
@@ -368,10 +378,6 @@ class EventBus(object):

This method must be run in the event loop.
"""
if event_type != EVENT_HOMEASSISTANT_STOP and \
self._hass.state == CoreState.stopping:
raise ShuttingDown("Home Assistant is shutting down")

listeners = self._listeners.get(event_type, [])

# EVENT_HOMEASSISTANT_CLOSE should go only to his listeners
@@ -1100,17 +1106,13 @@ def _async_create_timer(hass):

handle = hass.loop.call_later(slp_seconds, fire_time_event, nxt)

@callback
def start_timer(event):
"""Create an async timer."""
_LOGGER.info("Timer:starting")
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_timer)
fire_time_event(monotonic())

@callback
def stop_timer(event):
"""Stop the timer."""
if handle is not None:
handle.cancel()

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_timer)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_timer)

_LOGGER.info("Timer:starting")
fire_time_event(monotonic())
@@ -7,12 +7,6 @@ class HomeAssistantError(Exception):
pass


class ShuttingDown(HomeAssistantError):
"""When trying to change something during shutdown."""

pass


class InvalidEntityFormatError(HomeAssistantError):
"""When an invalid formatted entity is encountered."""

@@ -7,23 +7,19 @@ HomeAssistantError will be raised.
|
||||
For more details about the Python API, please refer to the documentation at
|
||||
https://home-assistant.io/developers/python_api/
|
||||
"""
|
||||
import asyncio
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from datetime import datetime
|
||||
import enum
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
import threading
|
||||
import urllib.parse
|
||||
|
||||
from typing import Optional
|
||||
|
||||
import requests
|
||||
|
||||
from homeassistant import setup, core as ha
|
||||
from homeassistant import core as ha
|
||||
from homeassistant.const import (
|
||||
HTTP_HEADER_HA_AUTH, SERVER_PORT, URL_API, URL_API_EVENT_FORWARD,
|
||||
HTTP_HEADER_HA_AUTH, SERVER_PORT, URL_API,
|
||||
URL_API_EVENTS, URL_API_EVENTS_EVENT, URL_API_SERVICES, URL_API_CONFIG,
|
||||
URL_API_SERVICES_SERVICE, URL_API_STATES, URL_API_STATES_ENTITY,
|
||||
HTTP_HEADER_CONTENT_TYPE, CONTENT_TYPE_JSON)
|
||||
@@ -116,195 +112,6 @@ class API(object):
|
||||
self.base_url, 'yes' if self.api_password is not None else 'no')
|
||||
|
||||
|
||||
class HomeAssistant(ha.HomeAssistant):
|
||||
"""Home Assistant that forwards work."""
|
||||
|
||||
# pylint: disable=super-init-not-called
|
||||
def __init__(self, remote_api, local_api=None, loop=None):
|
||||
"""Initalize the forward instance."""
|
||||
_LOGGER.warning('Remote instances of Home Assistant are deprecated. '
|
||||
'Will be removed by 0.43')
|
||||
if not remote_api.validate_api():
|
||||
raise HomeAssistantError(
|
||||
"Remote API at {}:{} not valid: {}".format(
|
||||
remote_api.host, remote_api.port, remote_api.status))
|
||||
|
||||
self.remote_api = remote_api
|
||||
|
||||
self.loop = loop or asyncio.get_event_loop()
|
||||
self.executor = ThreadPoolExecutor(max_workers=5)
|
||||
self.loop.set_default_executor(self.executor)
|
||||
self.loop.set_exception_handler(ha.async_loop_exception_handler)
|
||||
self._pending_tasks = []
|
||||
self._pending_sheduler = None
|
||||
|
||||
self.bus = EventBus(remote_api, self)
|
||||
self.services = ha.ServiceRegistry(self)
|
||||
self.states = StateMachine(self.bus, self.loop, self.remote_api)
|
||||
self.config = ha.Config()
|
||||
# This is a dictionary that any component can store any data on.
|
||||
self.data = {}
|
||||
self.state = ha.CoreState.not_running
|
||||
self.exit_code = None
|
||||
self.config.api = local_api
|
||||
|
||||
def start(self):
|
||||
"""Start the instance."""
|
||||
# Ensure a local API exists to connect with remote
|
||||
if 'api' not in self.config.components:
|
||||
if not setup.setup_component(self, 'api'):
|
||||
raise HomeAssistantError(
|
||||
'Unable to setup local API to receive events')
|
||||
|
||||
self.state = ha.CoreState.starting
|
||||
# pylint: disable=protected-access
|
||||
ha._async_create_timer(self)
|
||||
|
||||
self.bus.fire(ha.EVENT_HOMEASSISTANT_START,
|
||||
origin=ha.EventOrigin.remote)
|
||||
|
||||
# Ensure local HTTP is started
|
||||
self.block_till_done()
|
||||
self.state = ha.CoreState.running
|
||||
time.sleep(0.05)
|
||||
|
||||
# Setup that events from remote_api get forwarded to local_api
|
||||
# Do this after we are running, otherwise HTTP is not started
|
||||
# or requests are blocked
|
||||
if not connect_remote_events(self.remote_api, self.config.api):
|
||||
raise HomeAssistantError((
|
||||
'Could not setup event forwarding from api {} to '
|
||||
'local api {}').format(self.remote_api, self.config.api))
|
||||
|
||||
def stop(self):
|
||||
"""Stop Home Assistant and shuts down all threads."""
|
||||
_LOGGER.info("Stopping")
|
||||
self.state = ha.CoreState.stopping
|
||||
|
||||
self.bus.fire(ha.EVENT_HOMEASSISTANT_STOP,
|
||||
origin=ha.EventOrigin.remote)
|
||||
|
||||
# Disconnect master event forwarding
|
||||
disconnect_remote_events(self.remote_api, self.config.api)
|
||||
self.state = ha.CoreState.not_running
|
||||
|
||||
|
||||
class EventBus(ha.EventBus):
|
||||
"""EventBus implementation that forwards fire_event to remote API."""
|
||||
|
||||
def __init__(self, api, hass):
|
||||
"""Initalize the eventbus."""
|
||||
super().__init__(hass)
|
||||
self._api = api
|
||||
|
||||
def fire(self, event_type, event_data=None, origin=ha.EventOrigin.local):
|
||||
"""Forward local events to remote target.
|
||||
|
||||
Handles remote event as usual.
|
||||
"""
|
||||
# All local events that are not TIME_CHANGED are forwarded to API
|
||||
if origin == ha.EventOrigin.local and \
|
||||
event_type != ha.EVENT_TIME_CHANGED:
|
||||
|
||||
fire_event(self._api, event_type, event_data)
|
||||
|
||||
else:
|
||||
super().fire(event_type, event_data, origin)
|
||||
|
||||
|
||||
class EventForwarder(object):
|
||||
"""Listens for events and forwards to specified APIs."""
|
||||
|
||||
def __init__(self, hass, restrict_origin=None):
|
||||
"""Initalize the event forwarder."""
|
||||
_LOGGER.warning('API forwarding is deprecated. '
|
||||
'Will be removed by 0.43')
|
||||
|
||||
self.hass = hass
|
||||
self.restrict_origin = restrict_origin
|
||||
|
||||
# We use a tuple (host, port) as key to ensure
|
||||
# that we do not forward to the same host twice
|
||||
self._targets = {}
|
||||
|
||||
self._lock = threading.Lock()
|
||||
self._async_unsub_listener = None
|
||||
|
||||
@ha.callback
|
||||
def async_connect(self, api):
|
||||
"""Attach to a Home Assistant instance and forward events.
|
||||
|
||||
Will overwrite old target if one exists with same host/port.
|
||||
"""
|
||||
if self._async_unsub_listener is None:
|
||||
self._async_unsub_listener = self.hass.bus.async_listen(
|
||||
ha.MATCH_ALL, self._event_listener)
|
||||
|
||||
key = (api.host, api.port)
|
||||
|
||||
self._targets[key] = api
|
||||
|
||||
@ha.callback
|
||||
def async_disconnect(self, api):
|
||||
"""Remove target from being forwarded to."""
|
||||
key = (api.host, api.port)
|
||||
|
||||
did_remove = self._targets.pop(key, None) is None
|
||||
|
||||
if len(self._targets) == 0:
|
||||
# Remove event listener if no forwarding targets present
|
||||
self._async_unsub_listener()
|
||||
self._async_unsub_listener = None
|
||||
|
||||
return did_remove
|
||||
|
||||
def _event_listener(self, event):
|
||||
"""Listen and forward all events."""
|
||||
with self._lock:
|
||||
# We don't forward time events or, if enabled, non-local events
|
||||
if event.event_type == ha.EVENT_TIME_CHANGED or \
|
||||
(self.restrict_origin and event.origin != self.restrict_origin):
|
||||
return
|
||||
|
||||
for api in self._targets.values():
|
||||
fire_event(api, event.event_type, event.data)
|
||||
|
||||
|
||||
class StateMachine(ha.StateMachine):
|
||||
"""Fire set events to an API. Uses state_change events to track states."""
|
||||
|
||||
def __init__(self, bus, loop, api):
|
||||
"""Initalize the statemachine."""
|
||||
super().__init__(bus, loop)
|
||||
self._api = api
|
||||
self.mirror()
|
||||
|
||||
bus.listen(ha.EVENT_STATE_CHANGED, self._state_changed_listener)
|
||||
|
||||
def remove(self, entity_id):
|
||||
"""Remove the state of an entity.
|
||||
|
||||
Returns boolean to indicate if an entity was removed.
|
||||
"""
|
||||
return remove_state(self._api, entity_id)
|
||||
|
||||
def set(self, entity_id, new_state, attributes=None, force_update=False):
|
||||
"""Call set_state on remote API."""
|
||||
set_state(self._api, entity_id, new_state, attributes, force_update)
|
||||
|
||||
def mirror(self):
|
||||
"""Discard current data and mirrors the remote state machine."""
|
||||
self._states = {state.entity_id: state for state
|
||||
in get_states(self._api)}
|
||||
|
||||
def _state_changed_listener(self, event):
|
||||
"""Listen for state changed events and applies them."""
|
||||
if event.data['new_state'] is None:
|
||||
self._states.pop(event.data['entity_id'], None)
|
||||
else:
|
||||
self._states[event.data['entity_id']] = event.data['new_state']
|
||||
|
||||
|
||||
class JSONEncoder(json.JSONEncoder):
|
||||
"""JSONEncoder that supports Home Assistant objects."""
|
||||
|
||||
@@ -352,59 +159,6 @@ def validate_api(api):
|
||||
return APIStatus.CANNOT_CONNECT
|
||||
|
||||
|
||||
def connect_remote_events(from_api, to_api):
|
||||
"""Setup from_api to forward all events to to_api."""
|
||||
_LOGGER.warning('Event forwarding is deprecated. '
|
||||
'Will be removed by 0.43')
|
||||
data = {
|
||||
'host': to_api.host,
|
||||
'api_password': to_api.api_password,
|
||||
'port': to_api.port
|
||||
}
|
||||
|
||||
try:
|
||||
req = from_api(METHOD_POST, URL_API_EVENT_FORWARD, data)
|
||||
|
||||
if req.status_code == 200:
|
||||
return True
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Error setting up event forwarding: %s - %s",
|
||||
req.status_code, req.text)
|
||||
|
||||
return False
|
||||
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Error setting up event forwarding")
|
||||
return False
|
||||
|
||||
|
||||
def disconnect_remote_events(from_api, to_api):
|
||||
"""Disconnect forwarding events from from_api to to_api."""
|
||||
_LOGGER.warning('Event forwarding is deprecated. '
|
||||
'Will be removed by 0.43')
|
||||
data = {
|
||||
'host': to_api.host,
|
||||
'port': to_api.port
|
||||
}
|
||||
|
||||
try:
|
||||
req = from_api(METHOD_DELETE, URL_API_EVENT_FORWARD, data)
|
||||
|
||||
if req.status_code == 200:
|
||||
return True
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Error removing event forwarding: %s - %s",
|
||||
req.status_code, req.text)
|
||||
|
||||
return False
|
||||
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Error removing an event forwarder")
|
||||
return False
|
||||
|
||||
|
||||
def get_event_listeners(api):
|
||||
"""List of events that is being listened for."""
|
||||
try:
|
||||
|
@@ -48,7 +48,7 @@ aiolifx==0.4.2
|
||||
|
||||
# homeassistant.components.camera.amcrest
|
||||
# homeassistant.components.sensor.amcrest
|
||||
amcrest==1.1.5
|
||||
amcrest==1.1.8
|
||||
|
||||
# homeassistant.components.media_player.anthemav
|
||||
anthemav==1.1.8
|
||||
@@ -108,6 +108,9 @@ colorlog>2.1,<3
|
||||
# homeassistant.components.binary_sensor.concord232
|
||||
concord232==0.14
|
||||
|
||||
# homeassistant.components.sensor.crimereports
|
||||
crimereports==1.0.0
|
||||
|
||||
# homeassistant.components.sensor.metoffice
|
||||
# homeassistant.components.weather.metoffice
|
||||
datapoint==0.4.3
|
||||
@@ -209,7 +212,7 @@ googlemaps==2.4.6
|
||||
gps3==0.33.3
|
||||
|
||||
# homeassistant.components.media_player.gstreamer
|
||||
gstreamer-player==1.0.0
|
||||
gstreamer-player==1.1.0
|
||||
|
||||
# homeassistant.components.ffmpeg
|
||||
ha-ffmpeg==1.5
|
||||
@@ -311,7 +314,7 @@ https://github.com/tfriedel/python-lightify/archive/d6eadcf311e6e21746182d1480e9
|
||||
https://github.com/thecynic/pylutron/archive/v0.1.0.zip#pylutron==0.1.0
|
||||
|
||||
# homeassistant.components.mysensors
|
||||
https://github.com/theolind/pymysensors/archive/0b705119389be58332f17753c53167f551254b6c.zip#pymysensors==0.8
|
||||
https://github.com/theolind/pymysensors/archive/ff3476b70edc9c995b939cddb9d51f8d2d018581.zip#pymysensors==0.9.0
|
||||
|
||||
# homeassistant.components.sensor.modem_callerid
|
||||
https://github.com/vroomfonde1/basicmodem/archive/0.7.zip#basicmodem==0.7
|
||||
@@ -337,10 +340,10 @@ insteonplm==0.7.4
|
||||
|
||||
# homeassistant.components.media_player.kodi
|
||||
# homeassistant.components.notify.kodi
|
||||
jsonrpc-async==0.4
|
||||
jsonrpc-async==0.6
|
||||
|
||||
# homeassistant.components.media_player.kodi
|
||||
jsonrpc-websocket==0.3
|
||||
jsonrpc-websocket==0.5
|
||||
|
||||
# homeassistant.scripts.keyring
|
||||
keyring>=9.3,<10.0
|
||||
@@ -466,13 +469,13 @@ py-cpuinfo==3.0.0
|
||||
pyCEC==0.4.13
|
||||
|
||||
# homeassistant.components.switch.tplink
|
||||
pyHS100==0.2.4.1
|
||||
pyHS100==0.2.4.2
|
||||
|
||||
# homeassistant.components.rfxtrx
|
||||
pyRFXtrx==0.17.0
|
||||
|
||||
# homeassistant.components.alarm_control_panel.alarmdotcom
|
||||
pyalarmdotcom==0.2.9
|
||||
pyalarmdotcom==0.3.0
|
||||
|
||||
# homeassistant.components.notify.xmpp
|
||||
pyasn1-modules==0.0.8
|
||||
@@ -509,7 +512,7 @@ pydroid-ipcam==0.8
|
||||
pyebox==0.1.0
|
||||
|
||||
# homeassistant.components.media_player.emby
|
||||
pyemby==1.1
|
||||
pyemby==1.2
|
||||
|
||||
# homeassistant.components.envisalink
|
||||
pyenvisalink==2.0
|
||||
@@ -533,7 +536,7 @@ pyhik==0.1.2
|
||||
pyhomematic==0.1.24
|
||||
|
||||
# homeassistant.components.sensor.hydroquebec
|
||||
pyhydroquebec==1.0.0
|
||||
pyhydroquebec==1.1.0
|
||||
|
||||
# homeassistant.components.device_tracker.icloud
|
||||
pyicloud==0.9.1
|
||||
@@ -657,7 +660,7 @@ pyvera==0.2.25
|
||||
pywebpush==0.6.1
|
||||
|
||||
# homeassistant.components.wemo
|
||||
pywemo==0.4.16
|
||||
pywemo==0.4.17
|
||||
|
||||
# homeassistant.components.zabbix
|
||||
pyzabbix==0.7.4
|
||||
@@ -721,7 +724,7 @@ speedtest-cli==1.0.3
|
||||
|
||||
# homeassistant.components.recorder
|
||||
# homeassistant.scripts.db_migrator
|
||||
sqlalchemy==1.1.8
|
||||
sqlalchemy==1.1.9
|
||||
|
||||
# homeassistant.components.statsd
|
||||
statsd==3.2.1
|
||||
|
@@ -23,12 +23,13 @@ import homeassistant.util.yaml as yaml
|
||||
from homeassistant.const import (
|
||||
STATE_ON, STATE_OFF, DEVICE_DEFAULT_NAME, EVENT_TIME_CHANGED,
|
||||
EVENT_STATE_CHANGED, EVENT_PLATFORM_DISCOVERED, ATTR_SERVICE,
|
||||
ATTR_DISCOVERED, SERVER_PORT, EVENT_HOMEASSISTANT_STOP)
|
||||
ATTR_DISCOVERED, SERVER_PORT, EVENT_HOMEASSISTANT_CLOSE)
|
||||
from homeassistant.components import sun, mqtt, recorder
|
||||
from homeassistant.components.http.auth import auth_middleware
|
||||
from homeassistant.components.http.const import (
|
||||
KEY_USE_X_FORWARDED_FOR, KEY_BANS_ENABLED, KEY_TRUSTED_NETWORKS)
|
||||
from homeassistant.util.async import run_callback_threadsafe
|
||||
from homeassistant.util.async import (
|
||||
run_callback_threadsafe, run_coroutine_threadsafe)
|
||||
|
||||
_TEST_INSTANCE_PORT = SERVER_PORT
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -58,15 +59,11 @@ def get_test_home_assistant():
|
||||
loop.run_forever()
|
||||
stop_event.set()
|
||||
|
||||
orig_start = hass.start
|
||||
orig_stop = hass.stop
|
||||
|
||||
@patch.object(hass.loop, 'run_forever')
|
||||
@patch.object(hass.loop, 'close')
|
||||
def start_hass(*mocks):
|
||||
"""Helper to start hass."""
|
||||
orig_start()
|
||||
hass.block_till_done()
|
||||
run_coroutine_threadsafe(hass.async_start(), loop=hass.loop).result()
|
||||
|
||||
def stop_hass():
|
||||
"""Stop hass."""
|
||||
@@ -101,7 +98,6 @@ def async_test_home_assistant(loop):
|
||||
return orig_async_add_job(target, *args)
|
||||
|
||||
hass.async_add_job = async_add_job
|
||||
hass.async_track_tasks()
|
||||
|
||||
hass.config.location_name = 'test home'
|
||||
hass.config.config_dir = get_test_config_dir()
|
||||
@@ -123,7 +119,11 @@ def async_test_home_assistant(loop):
|
||||
@asyncio.coroutine
|
||||
def mock_async_start():
|
||||
"""Start the mocking."""
|
||||
with patch('homeassistant.core._async_create_timer'):
|
||||
# 1. We only mock time during tests
|
||||
# 2. We want block_till_done that is called inside stop_track_tasks
|
||||
with patch('homeassistant.core._async_create_timer'), \
|
||||
patch.object(hass, 'async_stop_track_tasks',
|
||||
hass.async_block_till_done):
|
||||
yield from orig_start()
|
||||
|
||||
hass.async_start = mock_async_start
|
||||
@@ -134,7 +134,7 @@ def async_test_home_assistant(loop):
|
||||
global INST_COUNT
|
||||
INST_COUNT -= 1
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, clear_instance)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, clear_instance)
|
||||
|
||||
return hass
|
||||
|
||||
|
@@ -1,11 +1,13 @@
|
||||
"""The tests for the Event automation."""
|
||||
import asyncio
|
||||
import unittest
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.setup import setup_component
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_START
|
||||
from homeassistant.core import callback, CoreState
|
||||
from homeassistant.setup import setup_component, async_setup_component
|
||||
import homeassistant.components.automation as automation
|
||||
|
||||
from tests.common import get_test_home_assistant, mock_component
|
||||
from tests.common import get_test_home_assistant, mock_component, mock_service
|
||||
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
@@ -92,3 +94,30 @@ class TestAutomationEvent(unittest.TestCase):
|
||||
self.hass.bus.fire('test_event', {'some_attr': 'some_other_value'})
|
||||
self.hass.block_till_done()
|
||||
self.assertEqual(0, len(self.calls))
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_if_fires_on_event_with_data(hass):
|
||||
"""Test the firing of events with data."""
|
||||
calls = mock_service(hass, 'test', 'automation')
|
||||
hass.state = CoreState.not_running
|
||||
|
||||
res = yield from async_setup_component(hass, automation.DOMAIN, {
|
||||
automation.DOMAIN: {
|
||||
'alias': 'hello',
|
||||
'trigger': {
|
||||
'platform': 'event',
|
||||
'event_type': EVENT_HOMEASSISTANT_START,
|
||||
},
|
||||
'action': {
|
||||
'service': 'test.automation',
|
||||
}
|
||||
}
|
||||
})
|
||||
assert res
|
||||
assert not automation.is_on(hass, 'automation.hello')
|
||||
assert len(calls) == 0
|
||||
|
||||
yield from hass.async_start()
|
||||
assert automation.is_on(hass, 'automation.hello')
|
||||
assert len(calls) == 1
|
||||
|
tests/components/automation/test_homeassistant.py (new file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
"""The tests for the Event automation."""
|
||||
import asyncio
|
||||
from unittest.mock import patch, Mock
|
||||
|
||||
from homeassistant.core import CoreState
|
||||
from homeassistant.setup import async_setup_component
|
||||
import homeassistant.components.automation as automation
|
||||
|
||||
from tests.common import mock_service, mock_coro
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_if_fires_on_hass_start(hass):
|
||||
"""Test the firing when HASS starts."""
|
||||
calls = mock_service(hass, 'test', 'automation')
|
||||
hass.state = CoreState.not_running
|
||||
config = {
|
||||
automation.DOMAIN: {
|
||||
'alias': 'hello',
|
||||
'trigger': {
|
||||
'platform': 'homeassistant',
|
||||
'event': 'start',
|
||||
},
|
||||
'action': {
|
||||
'service': 'test.automation',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
res = yield from async_setup_component(hass, automation.DOMAIN, config)
|
||||
assert res
|
||||
assert not automation.is_on(hass, 'automation.hello')
|
||||
assert len(calls) == 0
|
||||
|
||||
yield from hass.async_start()
|
||||
assert automation.is_on(hass, 'automation.hello')
|
||||
assert len(calls) == 1
|
||||
|
||||
with patch('homeassistant.config.async_hass_config_yaml',
|
||||
Mock(return_value=mock_coro(config))):
|
||||
yield from hass.services.async_call(
|
||||
automation.DOMAIN, automation.SERVICE_RELOAD, blocking=True)
|
||||
|
||||
assert automation.is_on(hass, 'automation.hello')
|
||||
assert len(calls) == 1
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_if_fires_on_hass_shutdown(hass):
|
||||
"""Test the firing when HASS starts."""
|
||||
calls = mock_service(hass, 'test', 'automation')
|
||||
hass.state = CoreState.not_running
|
||||
|
||||
res = yield from async_setup_component(hass, automation.DOMAIN, {
|
||||
automation.DOMAIN: {
|
||||
'alias': 'hello',
|
||||
'trigger': {
|
||||
'platform': 'homeassistant',
|
||||
'event': 'shutdown',
|
||||
},
|
||||
'action': {
|
||||
'service': 'test.automation',
|
||||
}
|
||||
}
|
||||
})
|
||||
assert res
|
||||
assert not automation.is_on(hass, 'automation.hello')
|
||||
assert len(calls) == 0
|
||||
|
||||
yield from hass.async_start()
|
||||
assert automation.is_on(hass, 'automation.hello')
|
||||
assert len(calls) == 0
|
||||
|
||||
with patch.object(hass.loop, 'stop'):
|
||||
yield from hass.async_stop()
|
||||
assert len(calls) == 1
|
||||
|
||||
# with patch('homeassistant.config.async_hass_config_yaml',
|
||||
# Mock(return_value=mock_coro(config))):
|
||||
# yield from hass.services.async_call(
|
||||
# automation.DOMAIN, automation.SERVICE_RELOAD, blocking=True)
|
||||
|
||||
# assert automation.is_on(hass, 'automation.hello')
|
||||
# assert len(calls) == 1
|
@@ -65,7 +65,7 @@ class TestFFmpegNoiseSetup(object):
|
||||
entity = self.hass.states.get('binary_sensor.ffmpeg_noise')
|
||||
assert entity.state == 'off'
|
||||
|
||||
mock_ffmpeg.call_args[0][2](True)
|
||||
self.hass.add_job(mock_ffmpeg.call_args[0][2], True)
|
||||
self.hass.block_till_done()
|
||||
|
||||
entity = self.hass.states.get('binary_sensor.ffmpeg_noise')
|
||||
@@ -130,7 +130,7 @@ class TestFFmpegMotionSetup(object):
|
||||
entity = self.hass.states.get('binary_sensor.ffmpeg_motion')
|
||||
assert entity.state == 'off'
|
||||
|
||||
mock_ffmpeg.call_args[0][2](True)
|
||||
self.hass.add_job(mock_ffmpeg.call_args[0][2], True)
|
||||
self.hass.block_till_done()
|
||||
|
||||
entity = self.hass.states.get('binary_sensor.ffmpeg_motion')
|
||||
|
@@ -15,7 +15,7 @@ def device(hass, mock_openzwave):
|
||||
node = MockNode()
|
||||
values = MockEntityValues(
|
||||
primary=MockValue(data=1, node=node),
|
||||
temperature=MockValue(data=5, node=node),
|
||||
temperature=MockValue(data=5, node=node, units=None),
|
||||
mode=MockValue(data=b'test1', data_items=[0, 1, 2], node=node),
|
||||
fan_mode=MockValue(data=b'test2', data_items=[3, 4, 5], node=node),
|
||||
operating_state=MockValue(data=6, node=node),
|
||||
@@ -30,9 +30,10 @@ def device(hass, mock_openzwave):
|
||||
def device_zxt_120(hass, mock_openzwave):
|
||||
"""Fixture to provide a precreated climate device."""
|
||||
node = MockNode(manufacturer_id='5254', product_id='8377')
|
||||
|
||||
values = MockEntityValues(
|
||||
primary=MockValue(data=1, node=node),
|
||||
temperature=MockValue(data=5, node=node),
|
||||
temperature=MockValue(data=5, node=node, units=None),
|
||||
mode=MockValue(data=b'test1', data_items=[0, 1, 2], node=node),
|
||||
fan_mode=MockValue(data=b'test2', data_items=[3, 4, 5], node=node),
|
||||
operating_state=MockValue(data=6, node=node),
|
||||
|
@@ -252,6 +252,7 @@ class TestSonosMediaPlayer(unittest.TestCase):
|
||||
"""Ensuring soco methods called for sonos_group_players service."""
|
||||
sonos.setup_platform(self.hass, {}, fake_add_device, '192.0.2.1')
|
||||
device = self.hass.data[sonos.DATA_SONOS][-1]
|
||||
device.hass = self.hass
|
||||
|
||||
device_master = mock.MagicMock()
|
||||
device_master.entity_id = "media_player.test"
|
||||
@@ -269,6 +270,8 @@ class TestSonosMediaPlayer(unittest.TestCase):
|
||||
"""Ensuring soco methods called for sonos_unjoin service."""
|
||||
sonos.setup_platform(self.hass, {}, fake_add_device, '192.0.2.1')
|
||||
device = self.hass.data[sonos.DATA_SONOS][-1]
|
||||
device.hass = self.hass
|
||||
|
||||
unjoinMock.return_value = True
|
||||
device.unjoin()
|
||||
self.assertEqual(unjoinMock.call_count, 1)
|
||||
@@ -281,6 +284,8 @@ class TestSonosMediaPlayer(unittest.TestCase):
|
||||
"""Ensuring soco methods called for sonos_set_sleep_timer service."""
|
||||
sonos.setup_platform(self.hass, {}, fake_add_device, '192.0.2.1')
|
||||
device = self.hass.data[sonos.DATA_SONOS][-1]
|
||||
device.hass = self.hass
|
||||
|
||||
device.set_sleep_timer(30)
|
||||
set_sleep_timerMock.assert_called_once_with(30)
|
||||
|
||||
@@ -291,6 +296,8 @@ class TestSonosMediaPlayer(unittest.TestCase):
|
||||
"""Ensuring soco methods called for sonos_clear_sleep_timer service."""
|
||||
sonos.setup_platform(self.hass, {}, mock.MagicMock(), '192.0.2.1')
|
||||
device = self.hass.data[sonos.DATA_SONOS][-1]
|
||||
device.hass = self.hass
|
||||
|
||||
device.set_sleep_timer(None)
|
||||
set_sleep_timerMock.assert_called_once_with(None)
|
||||
|
||||
@@ -301,6 +308,8 @@ class TestSonosMediaPlayer(unittest.TestCase):
|
||||
"""Ensuring soco methods called for sonos_snapshot service."""
|
||||
sonos.setup_platform(self.hass, {}, fake_add_device, '192.0.2.1')
|
||||
device = self.hass.data[sonos.DATA_SONOS][-1]
|
||||
device.hass = self.hass
|
||||
|
||||
snapshotMock.return_value = True
|
||||
device.snapshot()
|
||||
self.assertEqual(snapshotMock.call_count, 1)
|
||||
@@ -311,11 +320,16 @@ class TestSonosMediaPlayer(unittest.TestCase):
|
||||
@mock.patch.object(soco.snapshot.Snapshot, 'restore')
|
||||
def test_sonos_restore(self, restoreMock, *args):
|
||||
"""Ensuring soco methods called for sonos_restor service."""
|
||||
from soco.snapshot import Snapshot
|
||||
|
||||
sonos.setup_platform(self.hass, {}, fake_add_device, '192.0.2.1')
|
||||
device = self.hass.data[sonos.DATA_SONOS][-1]
|
||||
device.hass = self.hass
|
||||
|
||||
restoreMock.return_value = True
|
||||
device._snapshot_coordinator = mock.MagicMock()
|
||||
device._snapshot_coordinator.soco_device = SoCoMock('192.0.2.17')
|
||||
device._soco_snapshot = Snapshot(device._player)
|
||||
device.restore()
|
||||
self.assertEqual(restoreMock.call_count, 1)
|
||||
self.assertEqual(restoreMock.call_args, mock.call(False))
|
||||
|
@@ -166,7 +166,7 @@ class TestAlert(unittest.TestCase):
    def test_noack(self):
        """Test no ack feature."""
        entity = alert.Alert(self.hass, *TEST_NOACK)
        self.hass.async_add_job(entity.begin_alerting)
        self.hass.add_job(entity.begin_alerting)
        self.hass.block_till_done()

        self.assertEqual(True, entity.hidden)
@@ -337,81 +337,6 @@ class TestAPI(unittest.TestCase):

        self.assertEqual(400, req.status_code)

    def test_api_event_forward(self):
        """Test setting up event forwarding."""
        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            headers=HA_HEADERS)
        self.assertEqual(400, req.status_code)

        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({'host': '127.0.0.1'}),
            headers=HA_HEADERS)
        self.assertEqual(400, req.status_code)

        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({'api_password': 'bla-di-bla'}),
            headers=HA_HEADERS)
        self.assertEqual(400, req.status_code)

        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'api_password': 'bla-di-bla',
                'host': '127.0.0.1',
                'port': 'abcd'
            }),
            headers=HA_HEADERS)
        self.assertEqual(422, req.status_code)

        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'api_password': 'bla-di-bla',
                'host': '127.0.0.1',
                'port': get_test_instance_port()
            }),
            headers=HA_HEADERS)
        self.assertEqual(422, req.status_code)

        # Setup a real one
        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'api_password': API_PASSWORD,
                'host': '127.0.0.1',
                'port': SERVER_PORT
            }),
            headers=HA_HEADERS)
        self.assertEqual(200, req.status_code)

        # Delete it again..
        req = requests.delete(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({}),
            headers=HA_HEADERS)
        self.assertEqual(400, req.status_code)

        req = requests.delete(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'host': '127.0.0.1',
                'port': 'abcd'
            }),
            headers=HA_HEADERS)
        self.assertEqual(422, req.status_code)

        req = requests.delete(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'host': '127.0.0.1',
                'port': SERVER_PORT
            }),
            headers=HA_HEADERS)
        self.assertEqual(200, req.status_code)

    def test_stream(self):
        """Test the stream."""
        listen_count = self._listen_count()
@@ -5,9 +5,9 @@ from unittest.mock import patch
|
||||
|
||||
from homeassistant.bootstrap import async_setup_component
|
||||
from homeassistant.components import discovery
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_START
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from tests.common import mock_coro
|
||||
from tests.common import mock_coro, fire_time_changed
|
||||
|
||||
# One might consider to "mock" services, but it's easy enough to just use
|
||||
# what is already available.
|
||||
@@ -34,24 +34,34 @@ IGNORE_CONFIG = {
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_unknown_service(hass):
|
||||
"""Test that unknown service is ignored."""
|
||||
result = yield from async_setup_component(hass, 'discovery', {
|
||||
'discovery': {},
|
||||
})
|
||||
def mock_discovery(hass, discoveries, config=BASE_CONFIG):
|
||||
"""Helper to mock discoveries."""
|
||||
result = yield from async_setup_component(hass, 'discovery', config)
|
||||
assert result
|
||||
|
||||
def discover(netdisco):
|
||||
"""Fake discovery."""
|
||||
return [('this_service_will_never_be_supported', {'info': 'some'})]
|
||||
yield from hass.async_start()
|
||||
|
||||
with patch.object(discovery, '_discover', discover), \
|
||||
with patch.object(discovery, '_discover', discoveries), \
|
||||
patch('homeassistant.components.discovery.async_discover',
|
||||
return_value=mock_coro()) as mock_discover, \
|
||||
patch('homeassistant.components.discovery.async_load_platform',
|
||||
return_value=mock_coro()) as mock_platform:
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
fire_time_changed(hass, utcnow())
|
||||
# Work around an issue where our loop.call_soon not get caught
|
||||
yield from hass.async_block_till_done()
|
||||
yield from hass.async_block_till_done()
|
||||
|
||||
return mock_discover, mock_platform
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_unknown_service(hass):
|
||||
"""Test that unknown service is ignored."""
|
||||
def discover(netdisco):
|
||||
"""Fake discovery."""
|
||||
return [('this_service_will_never_be_supported', {'info': 'some'})]
|
||||
|
||||
mock_discover, mock_platform = yield from mock_discovery(hass, discover)
|
||||
|
||||
assert not mock_discover.called
|
||||
assert not mock_platform.called
|
||||
@@ -60,20 +70,11 @@ def test_unknown_service(hass):
|
||||
@asyncio.coroutine
|
||||
def test_load_platform(hass):
|
||||
"""Test load a platform."""
|
||||
result = yield from async_setup_component(hass, 'discovery', BASE_CONFIG)
|
||||
assert result
|
||||
|
||||
def discover(netdisco):
|
||||
"""Fake discovery."""
|
||||
return [(SERVICE, SERVICE_INFO)]
|
||||
|
||||
with patch.object(discovery, '_discover', discover), \
|
||||
patch('homeassistant.components.discovery.async_discover',
|
||||
return_value=mock_coro()) as mock_discover, \
|
||||
patch('homeassistant.components.discovery.async_load_platform',
|
||||
return_value=mock_coro()) as mock_platform:
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
yield from hass.async_block_till_done()
|
||||
mock_discover, mock_platform = yield from mock_discovery(hass, discover)
|
||||
|
||||
assert not mock_discover.called
|
||||
assert mock_platform.called
|
||||
@@ -84,20 +85,11 @@ def test_load_platform(hass):
|
||||
@asyncio.coroutine
|
||||
def test_load_component(hass):
|
||||
"""Test load a component."""
|
||||
result = yield from async_setup_component(hass, 'discovery', BASE_CONFIG)
|
||||
assert result
|
||||
|
||||
def discover(netdisco):
|
||||
"""Fake discovery."""
|
||||
return [(SERVICE_NO_PLATFORM, SERVICE_INFO)]
|
||||
|
||||
with patch.object(discovery, '_discover', discover), \
|
||||
patch('homeassistant.components.discovery.async_discover',
|
||||
return_value=mock_coro()) as mock_discover, \
|
||||
patch('homeassistant.components.discovery.async_load_platform',
|
||||
return_value=mock_coro()) as mock_platform:
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
yield from hass.async_block_till_done()
|
||||
mock_discover, mock_platform = yield from mock_discovery(hass, discover)
|
||||
|
||||
assert mock_discover.called
|
||||
assert not mock_platform.called
|
||||
@@ -109,20 +101,12 @@ def test_load_component(hass):
|
||||
@asyncio.coroutine
|
||||
def test_ignore_service(hass):
|
||||
"""Test ignore service."""
|
||||
result = yield from async_setup_component(hass, 'discovery', IGNORE_CONFIG)
|
||||
assert result
|
||||
|
||||
def discover(netdisco):
|
||||
"""Fake discovery."""
|
||||
return [(SERVICE_NO_PLATFORM, SERVICE_INFO)]
|
||||
|
||||
with patch.object(discovery, '_discover', discover), \
|
||||
patch('homeassistant.components.discovery.async_discover',
|
||||
return_value=mock_coro()) as mock_discover, \
|
||||
patch('homeassistant.components.discovery.async_load_platform',
|
||||
return_value=mock_coro()) as mock_platform:
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
yield from hass.async_block_till_done()
|
||||
mock_discover, mock_platform = yield from mock_discovery(hass, discover,
|
||||
IGNORE_CONFIG)
|
||||
|
||||
assert not mock_discover.called
|
||||
assert not mock_platform.called
|
||||
@@ -131,21 +115,12 @@ def test_ignore_service(hass):
|
||||
@asyncio.coroutine
|
||||
def test_discover_duplicates(hass):
|
||||
"""Test load a component."""
|
||||
result = yield from async_setup_component(hass, 'discovery', BASE_CONFIG)
|
||||
assert result
|
||||
|
||||
def discover(netdisco):
|
||||
"""Fake discovery."""
|
||||
return [(SERVICE_NO_PLATFORM, SERVICE_INFO),
|
||||
(SERVICE_NO_PLATFORM, SERVICE_INFO)]
|
||||
|
||||
with patch.object(discovery, '_discover', discover), \
|
||||
patch('homeassistant.components.discovery.async_discover',
|
||||
return_value=mock_coro()) as mock_discover, \
|
||||
patch('homeassistant.components.discovery.async_load_platform',
|
||||
return_value=mock_coro()) as mock_platform:
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
yield from hass.async_block_till_done()
|
||||
mock_discover, mock_platform = yield from mock_discovery(hass, discover)
|
||||
|
||||
assert mock_discover.called
|
||||
assert mock_discover.call_count == 1
|
||||
|
543
tests/components/test_hassio.py
Normal file
@@ -0,0 +1,543 @@
|
||||
"""The tests for the hassio component."""
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
import aiohttp
|
||||
|
||||
import homeassistant.components.hassio as ho
|
||||
from homeassistant.setup import setup_component, async_setup_component
|
||||
|
||||
from tests.common import (
|
||||
get_test_home_assistant, assert_setup_component)
|
||||
|
||||
|
||||
class TestHassIOSetup(object):
|
||||
"""Test the hassio component."""
|
||||
|
||||
def setup_method(self):
|
||||
"""Setup things to be run when tests are started."""
|
||||
self.hass = get_test_home_assistant()
|
||||
|
||||
self.config = {
|
||||
ho.DOMAIN: {},
|
||||
}
|
||||
|
||||
os.environ['HASSIO'] = "127.0.0.1"
|
||||
|
||||
def teardown_method(self):
|
||||
"""Stop everything that was started."""
|
||||
self.hass.stop()
|
||||
|
||||
def test_setup_component(self, aioclient_mock):
|
||||
"""Test setup component."""
|
||||
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={
|
||||
'result': 'ok', 'data': {}
|
||||
})
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
def test_setup_component_test_service(self, aioclient_mock):
|
||||
"""Test setup component and check if service exits."""
|
||||
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={
|
||||
'result': 'ok', 'data': {}
|
||||
})
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_HOST_REBOOT)
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_HOST_SHUTDOWN)
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_HOST_UPDATE)
|
||||
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_SUPERVISOR_UPDATE)
|
||||
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_INSTALL)
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_UNINSTALL)
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_UPDATE)
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_START)
|
||||
assert self.hass.services.has_service(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_STOP)
|
||||
|
||||
|
||||
class TestHassIOComponent(object):
|
||||
"""Test the HassIO component."""
|
||||
|
||||
def setup_method(self):
|
||||
"""Setup things to be run when tests are started."""
|
||||
self.hass = get_test_home_assistant()
|
||||
self.config = {
|
||||
ho.DOMAIN: {},
|
||||
}
|
||||
|
||||
os.environ['HASSIO'] = "127.0.0.1"
|
||||
self.url = "http://127.0.0.1/{}"
|
||||
|
||||
self.error_msg = {
|
||||
'result': 'error',
|
||||
'message': 'Test error',
|
||||
}
|
||||
self.ok_msg = {
|
||||
'result': 'ok',
|
||||
'data': {},
|
||||
}
|
||||
|
||||
def teardown_method(self):
|
||||
"""Stop everything that was started."""
|
||||
self.hass.stop()
|
||||
|
||||
def test_rest_command_timeout(self, aioclient_mock):
|
||||
"""Call a hassio with timeout."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("host/update"), exc=asyncio.TimeoutError())
|
||||
|
||||
self.hass.services.call(ho.DOMAIN, ho.SERVICE_HOST_UPDATE, {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
def test_rest_command_aiohttp_error(self, aioclient_mock):
|
||||
"""Call a hassio with aiohttp exception."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("host/update"), exc=aiohttp.ClientError())
|
||||
|
||||
self.hass.services.call(ho.DOMAIN, ho.SERVICE_HOST_UPDATE, {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
def test_rest_command_http_error(self, aioclient_mock):
|
||||
"""Call a hassio with status code 503."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("host/update"), status=503)
|
||||
|
||||
self.hass.services.call(ho.DOMAIN, ho.SERVICE_HOST_UPDATE, {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
def test_rest_command_http_error_api(self, aioclient_mock):
|
||||
"""Call a hassio with status code 503."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("host/update"), json=self.error_msg)
|
||||
|
||||
self.hass.services.call(ho.DOMAIN, ho.SERVICE_HOST_UPDATE, {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
def test_rest_command_http_host_reboot(self, aioclient_mock):
|
||||
"""Call a hassio for host reboot."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("host/reboot"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(ho.DOMAIN, ho.SERVICE_HOST_REBOOT, {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
def test_rest_command_http_host_shutdown(self, aioclient_mock):
|
||||
"""Call a hassio for host shutdown."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("host/shutdown"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(ho.DOMAIN, ho.SERVICE_HOST_SHUTDOWN, {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
def test_rest_command_http_host_update(self, aioclient_mock):
|
||||
"""Call a hassio for host update."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("host/update"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(
|
||||
ho.DOMAIN, ho.SERVICE_HOST_UPDATE, {'version': '0.4'})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert aioclient_mock.mock_calls[-1][2]['version'] == '0.4'
|
||||
|
||||
def test_rest_command_http_supervisor_update(self, aioclient_mock):
|
||||
"""Call a hassio for supervisor update."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("supervisor/update"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(
|
||||
ho.DOMAIN, ho.SERVICE_SUPERVISOR_UPDATE, {'version': '0.4'})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert aioclient_mock.mock_calls[-1][2]['version'] == '0.4'
|
||||
|
||||
def test_rest_command_http_homeassistant_update(self, aioclient_mock):
|
||||
"""Call a hassio for homeassistant update."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("homeassistant/update"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(
|
||||
ho.DOMAIN, ho.SERVICE_HOMEASSISTANT_UPDATE, {'version': '0.4'})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert aioclient_mock.mock_calls[-1][2]['version'] == '0.4'
|
||||
|
||||
def test_rest_command_http_addon_install(self, aioclient_mock):
|
||||
"""Call a hassio for addon install."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("addons/smb_config/install"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_INSTALL, {
|
||||
'addon': 'smb_config',
|
||||
'version': '0.4'
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert aioclient_mock.mock_calls[-1][2]['version'] == '0.4'
|
||||
|
||||
def test_rest_command_http_addon_uninstall(self, aioclient_mock):
|
||||
"""Call a hassio for addon uninstall."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("addons/smb_config/uninstall"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_UNINSTALL, {
|
||||
'addon': 'smb_config'
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
def test_rest_command_http_addon_update(self, aioclient_mock):
|
||||
"""Call a hassio for addon update."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("addons/smb_config/update"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_UPDATE, {
|
||||
'addon': 'smb_config',
|
||||
'version': '0.4'
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert aioclient_mock.mock_calls[-1][2]['version'] == '0.4'
|
||||
|
||||
def test_rest_command_http_addon_start(self, aioclient_mock):
|
||||
"""Call a hassio for addon start."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("addons/smb_config/start"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_START, {
|
||||
'addon': 'smb_config',
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
def test_rest_command_http_addon_stop(self, aioclient_mock):
|
||||
"""Call a hassio for addon stop."""
|
||||
aioclient_mock.get(
|
||||
"http://127.0.0.1/supervisor/ping", json=self.ok_msg)
|
||||
with assert_setup_component(0, ho.DOMAIN):
|
||||
setup_component(self.hass, ho.DOMAIN, self.config)
|
||||
|
||||
aioclient_mock.get(
|
||||
self.url.format("addons/smb_config/stop"), json=self.ok_msg)
|
||||
|
||||
self.hass.services.call(
|
||||
ho.DOMAIN, ho.SERVICE_ADDON_STOP, {
|
||||
'addon': 'smb_config'
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_async_hassio_host_view(aioclient_mock, hass, test_client):
|
||||
"""Test that it fetches the given url."""
|
||||
os.environ['HASSIO'] = "127.0.0.1"
|
||||
|
||||
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={
|
||||
'result': 'ok', 'data': {}
|
||||
})
|
||||
result = yield from async_setup_component(hass, ho.DOMAIN, {ho.DOMAIN: {}})
|
||||
assert result, 'Failed to setup hassio'
|
||||
|
||||
client = yield from test_client(hass.http.app)
|
||||
|
||||
aioclient_mock.get('http://127.0.0.1/host/info', json={
|
||||
'result': 'ok',
|
||||
'data': {
|
||||
'os': 'resinos',
|
||||
'version': '0.3',
|
||||
'current': '0.4',
|
||||
'level': 16,
|
||||
'hostname': 'test',
|
||||
}
|
||||
})
|
||||
|
||||
resp = yield from client.get('/api/hassio/host')
|
||||
data = yield from resp.json()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert resp.status == 200
|
||||
assert data['os'] == 'resinos'
|
||||
assert data['version'] == '0.3'
|
||||
assert data['current'] == '0.4'
|
||||
assert data['level'] == 16
|
||||
assert data['hostname'] == 'test'
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_async_hassio_homeassistant_view(aioclient_mock, hass, test_client):
|
||||
"""Test that it fetches the given url."""
|
||||
os.environ['HASSIO'] = "127.0.0.1"
|
||||
|
||||
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={
|
||||
'result': 'ok', 'data': {}
|
||||
})
|
||||
result = yield from async_setup_component(hass, ho.DOMAIN, {ho.DOMAIN: {}})
|
||||
assert result, 'Failed to setup hassio'
|
||||
|
||||
client = yield from test_client(hass.http.app)
|
||||
|
||||
aioclient_mock.get('http://127.0.0.1/homeassistant/info', json={
|
||||
'result': 'ok',
|
||||
'data': {
|
||||
'version': '0.41',
|
||||
'current': '0.41.1',
|
||||
}
|
||||
})
|
||||
|
||||
resp = yield from client.get('/api/hassio/homeassistant')
|
||||
data = yield from resp.json()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert resp.status == 200
|
||||
assert data['version'] == '0.41'
|
||||
assert data['current'] == '0.41.1'
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_async_hassio_supervisor_view(aioclient_mock, hass, test_client):
|
||||
"""Test that it fetches the given url."""
|
||||
os.environ['HASSIO'] = "127.0.0.1"
|
||||
|
||||
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={
|
||||
'result': 'ok', 'data': {}
|
||||
})
|
||||
result = yield from async_setup_component(hass, ho.DOMAIN, {ho.DOMAIN: {}})
|
||||
assert result, 'Failed to setup hassio'
|
||||
|
||||
client = yield from test_client(hass.http.app)
|
||||
|
||||
aioclient_mock.get('http://127.0.0.1/supervisor/info', json={
|
||||
'result': 'ok',
|
||||
'data': {
|
||||
'version': '0.3',
|
||||
'current': '0.4',
|
||||
'beta': False,
|
||||
}
|
||||
})
|
||||
|
||||
resp = yield from client.get('/api/hassio/supervisor')
|
||||
data = yield from resp.json()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert resp.status == 200
|
||||
assert data['version'] == '0.3'
|
||||
assert data['current'] == '0.4'
|
||||
assert not data['beta']
|
||||
|
||||
aioclient_mock.get('http://127.0.0.1/supervisor/options', json={
|
||||
'result': 'ok',
|
||||
'data': {},
|
||||
})
|
||||
|
||||
resp = yield from client.post('/api/hassio/supervisor', json={
|
||||
'beta': True,
|
||||
})
|
||||
data = yield from resp.json()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 3
|
||||
assert resp.status == 200
|
||||
assert aioclient_mock.mock_calls[-1][2]['beta']
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_async_hassio_network_view(aioclient_mock, hass, test_client):
|
||||
"""Test that it fetches the given url."""
|
||||
os.environ['HASSIO'] = "127.0.0.1"
|
||||
|
||||
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={
|
||||
'result': 'ok', 'data': {}
|
||||
})
|
||||
result = yield from async_setup_component(hass, ho.DOMAIN, {ho.DOMAIN: {}})
|
||||
assert result, 'Failed to setup hassio'
|
||||
|
||||
client = yield from test_client(hass.http.app)
|
||||
|
||||
aioclient_mock.get('http://127.0.0.1/network/info', json={
|
||||
'result': 'ok',
|
||||
'data': {
|
||||
'mode': 'dhcp',
|
||||
'ssid': 'my_wlan',
|
||||
'password': '123456',
|
||||
}
|
||||
})
|
||||
|
||||
resp = yield from client.get('/api/hassio/network')
|
||||
data = yield from resp.json()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
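        # Presumably two mocked calls are expected here: the supervisor ping
        # made during setup and the host/update request from the service call.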
assert resp.status == 200
|
||||
assert data['mode'] == 'dhcp'
|
||||
assert data['ssid'] == 'my_wlan'
|
||||
assert data['password'] == '123456'
|
||||
|
||||
aioclient_mock.get('http://127.0.0.1/network/options', json={
|
||||
'result': 'ok',
|
||||
'data': {},
|
||||
})
|
||||
|
||||
resp = yield from client.post('/api/hassio/network', json={
|
||||
'mode': 'dhcp',
|
||||
'ssid': 'my_wlan2',
|
||||
'password': '654321',
|
||||
})
|
||||
data = yield from resp.json()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 3
|
||||
assert resp.status == 200
|
||||
assert aioclient_mock.mock_calls[-1][2]['ssid'] == 'my_wlan2'
|
||||
assert aioclient_mock.mock_calls[-1][2]['password'] == '654321'
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_async_hassio_addon_view(aioclient_mock, hass, test_client):
|
||||
"""Test that it fetches the given url."""
|
||||
os.environ['HASSIO'] = "127.0.0.1"
|
||||
|
||||
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={
|
||||
'result': 'ok', 'data': {}
|
||||
})
|
||||
result = yield from async_setup_component(hass, ho.DOMAIN, {ho.DOMAIN: {}})
|
||||
assert result, 'Failed to setup hassio'
|
||||
|
||||
client = yield from test_client(hass.http.app)
|
||||
|
||||
aioclient_mock.get('http://127.0.0.1/addons/smb_config/info', json={
|
||||
'result': 'ok',
|
||||
'data': {
|
||||
'name': 'SMB Config',
|
||||
'state': 'running',
|
||||
'boot': 'auto',
|
||||
'options': {
|
||||
'bla': False,
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
resp = yield from client.get('/api/hassio/addons/smb_config')
|
||||
data = yield from resp.json()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert resp.status == 200
|
||||
assert data['name'] == 'SMB Config'
|
||||
assert data['state'] == 'running'
|
||||
assert data['boot'] == 'auto'
|
||||
assert not data['options']['bla']
|
||||
|
||||
aioclient_mock.get('http://127.0.0.1/addons/smb_config/options', json={
|
||||
'result': 'ok',
|
||||
'data': {},
|
||||
})
|
||||
|
||||
resp = yield from client.post('/api/hassio/addons/smb_config', json={
|
||||
'boot': 'manual',
|
||||
'options': {
|
||||
'bla': True,
|
||||
}
|
||||
})
|
||||
data = yield from resp.json()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 3
|
||||
assert resp.status == 200
|
||||
assert aioclient_mock.mock_calls[-1][2]['boot'] == 'manual'
|
||||
assert aioclient_mock.mock_calls[-1][2]['options']['bla']
|
@@ -44,6 +44,38 @@ class TestSun(unittest.TestCase):
        latitude = self.hass.config.latitude
        longitude = self.hass.config.longitude

        # Search forward one day at a time, starting from yesterday, until the
        # computed astral event lies in the future.
        mod = -1
        while True:
            next_dawn = (astral.dawn_utc(utc_now +
                         timedelta(days=mod), latitude, longitude))
            if next_dawn > utc_now:
                break
            mod += 1

        mod = -1
        while True:
            next_dusk = (astral.dusk_utc(utc_now +
                         timedelta(days=mod), latitude, longitude))
            if next_dusk > utc_now:
                break
            mod += 1

        mod = -1
        while True:
            next_midnight = (astral.solar_midnight_utc(utc_now +
                             timedelta(days=mod), longitude))
            if next_midnight > utc_now:
                break
            mod += 1

        mod = -1
        while True:
            next_noon = (astral.solar_noon_utc(utc_now +
                         timedelta(days=mod), longitude))
            if next_noon > utc_now:
                break
            mod += 1

        mod = -1
        while True:
            next_rising = (astral.sunrise_utc(utc_now +
@@ -60,15 +92,27 @@ class TestSun(unittest.TestCase):
                break
            mod += 1

        self.assertEqual(next_dawn, sun.next_dawn_utc(self.hass))
        self.assertEqual(next_dusk, sun.next_dusk_utc(self.hass))
        self.assertEqual(next_midnight, sun.next_midnight_utc(self.hass))
        self.assertEqual(next_noon, sun.next_noon_utc(self.hass))
        self.assertEqual(next_rising, sun.next_rising_utc(self.hass))
        self.assertEqual(next_setting, sun.next_setting_utc(self.hass))

        # Point it at a state without the proper attributes
        self.hass.states.set(sun.ENTITY_ID, sun.STATE_ABOVE_HORIZON)
        self.assertIsNone(sun.next_dawn(self.hass))
        self.assertIsNone(sun.next_dusk(self.hass))
        self.assertIsNone(sun.next_midnight(self.hass))
        self.assertIsNone(sun.next_noon(self.hass))
        self.assertIsNone(sun.next_rising(self.hass))
        self.assertIsNone(sun.next_setting(self.hass))

        # Point it at a non-existing state
        self.assertIsNone(sun.next_dawn(self.hass, 'non.existing'))
        self.assertIsNone(sun.next_dusk(self.hass, 'non.existing'))
        self.assertIsNone(sun.next_midnight(self.hass, 'non.existing'))
        self.assertIsNone(sun.next_noon(self.hass, 'non.existing'))
        self.assertIsNone(sun.next_rising(self.hass, 'non.existing'))
        self.assertIsNone(sun.next_setting(self.hass, 'non.existing'))

||||
|
@@ -1,7 +1,28 @@
"""Tests for the Z-Wave init."""
import asyncio
from collections import OrderedDict

from homeassistant.bootstrap import async_setup_component
from homeassistant.const import ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_START
from homeassistant.components import zwave
from homeassistant.components.binary_sensor.zwave import get_device
from homeassistant.components.zwave import (
    const, CONFIG_SCHEMA, CONF_DEVICE_CONFIG_GLOB)
from homeassistant.setup import setup_component

import pytest
import unittest
from unittest.mock import patch, MagicMock

from tests.common import get_test_home_assistant
from tests.mock.zwave import MockNetwork, MockNode, MockValue, MockEntityValues


@asyncio.coroutine
def test_missing_openzwave(hass):
    """Test that missing openzwave lib stops setup."""
    result = yield from async_setup_component(hass, 'zwave', {'zwave': {}})
    assert not result


@asyncio.coroutine
|
||||
@@ -34,3 +55,764 @@ def test_invalid_device_config(hass, mock_openzwave):
|
||||
}})
|
||||
|
||||
assert not result
|
||||
|
||||
|
||||
def test_config_access_error():
|
||||
"""Test threading error accessing config values."""
|
||||
node = MagicMock()
|
||||
|
||||
def side_effect():
|
||||
raise RuntimeError
|
||||
|
||||
node.values.values.side_effect = side_effect
|
||||
result = zwave.get_config_value(node, 1)
|
||||
assert result is None
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
@patch.object(zwave, 'NETWORK')
|
||||
def test_setup_platform(mock_network, hass, mock_openzwave):
|
||||
"""Test invalid device config."""
|
||||
mock_device = MagicMock()
|
||||
hass.data[zwave.DATA_ZWAVE_DICT] = {456: mock_device}
|
||||
async_add_devices = MagicMock()
|
||||
|
||||
result = yield from zwave.async_setup_platform(
|
||||
hass, None, async_add_devices, None)
|
||||
assert not result
|
||||
assert not async_add_devices.called
|
||||
|
||||
result = yield from zwave.async_setup_platform(
|
||||
hass, None, async_add_devices, {const.DISCOVERY_DEVICE: 123})
|
||||
assert not result
|
||||
assert not async_add_devices.called
|
||||
|
||||
result = yield from zwave.async_setup_platform(
|
||||
hass, None, async_add_devices, {const.DISCOVERY_DEVICE: 456})
|
||||
assert result
|
||||
assert async_add_devices.called
|
||||
assert len(async_add_devices.mock_calls) == 1
|
||||
assert async_add_devices.mock_calls[0][1][0] == [mock_device]
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_zwave_ready_wait(hass, mock_openzwave):
|
||||
"""Test that zwave continues after waiting for network ready."""
|
||||
# Initialize zwave
|
||||
yield from async_setup_component(hass, 'zwave', {'zwave': {}})
|
||||
yield from hass.async_block_till_done()
|
||||
|
||||
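    # Assumption (not stated in the diff): startup sleeps one second at a time
    # while the mock network is not ready, so sleep should be called
    # NETWORK_READY_WAIT_SECS times below.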
with patch.object(zwave.time, 'sleep') as mock_sleep:
|
||||
with patch.object(zwave, '_LOGGER') as mock_logger:
|
||||
zwave.NETWORK.state = MockNetwork.STATE_STARTED
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
yield from hass.async_block_till_done()
|
||||
|
||||
assert mock_sleep.called
|
||||
assert len(mock_sleep.mock_calls) == const.NETWORK_READY_WAIT_SECS
|
||||
assert mock_logger.warning.called
|
||||
assert len(mock_logger.warning.mock_calls) == 1
|
||||
assert mock_logger.warning.mock_calls[0][1][1] == \
|
||||
const.NETWORK_READY_WAIT_SECS
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def test_device_entity(hass, mock_openzwave):
|
||||
"""Test device entity base class."""
|
||||
node = MockNode(node_id='10', name='Mock Node')
|
||||
value = MockValue(data=False, node=node, instance=2, object_id='11',
|
||||
label='Sensor',
|
||||
command_class=const.COMMAND_CLASS_SENSOR_BINARY)
|
||||
power_value = MockValue(data=50.123456, node=node, precision=3,
|
||||
command_class=const.COMMAND_CLASS_METER)
|
||||
values = MockEntityValues(primary=value, power=power_value)
|
||||
device = zwave.ZWaveDeviceEntity(values, 'zwave')
|
||||
device.hass = hass
|
||||
device.value_added()
|
||||
device.update_properties()
|
||||
yield from hass.async_block_till_done()
|
||||
|
||||
assert not device.should_poll
|
||||
assert device.unique_id == "ZWAVE-10-11"
|
||||
assert device.name == 'Mock Node Sensor'
|
||||
assert device.device_state_attributes[zwave.ATTR_POWER] == 50.123
|
||||
|
||||
|
||||
class TestZWaveDeviceEntityValues(unittest.TestCase):
|
||||
"""Tests for the ZWaveDeviceEntityValues helper."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def set_mock_openzwave(self, mock_openzwave):
|
||||
"""Use the mock_openzwave fixture for this class."""
|
||||
self.mock_openzwave = mock_openzwave
|
||||
|
||||
def setUp(self):
|
||||
"""Initialize values for this testcase class."""
|
||||
self.hass = get_test_home_assistant()
|
||||
self.hass.start()
|
||||
|
||||
setup_component(self.hass, 'zwave', {'zwave': {}})
|
||||
self.hass.block_till_done()
|
||||
|
||||
self.node = MockNode()
|
||||
self.mock_schema = {
|
||||
const.DISC_COMPONENT: 'mock_component',
|
||||
const.DISC_VALUES: {
|
||||
const.DISC_PRIMARY: {
|
||||
const.DISC_COMMAND_CLASS: ['mock_primary_class'],
|
||||
},
|
||||
'secondary': {
|
||||
const.DISC_COMMAND_CLASS: ['mock_secondary_class'],
|
||||
},
|
||||
'optional': {
|
||||
const.DISC_COMMAND_CLASS: ['mock_optional_class'],
|
||||
const.DISC_OPTIONAL: True,
|
||||
}}}
|
||||
self.primary = MockValue(
|
||||
command_class='mock_primary_class', node=self.node)
|
||||
self.secondary = MockValue(
|
||||
command_class='mock_secondary_class', node=self.node)
|
||||
self.duplicate_secondary = MockValue(
|
||||
command_class='mock_secondary_class', node=self.node)
|
||||
self.optional = MockValue(
|
||||
command_class='mock_optional_class', node=self.node)
|
||||
self.no_match_value = MockValue(
|
||||
command_class='mock_bad_class', node=self.node)
|
||||
|
||||
self.entity_id = '{}.{}'.format('mock_component',
|
||||
zwave.object_id(self.primary))
|
||||
self.zwave_config = {}
|
||||
self.device_config = {self.entity_id: {}}
|
||||
|
||||
def tearDown(self): # pylint: disable=invalid-name
|
||||
"""Stop everything that was started."""
|
||||
self.hass.stop()
|
||||
|
||||
@patch.object(zwave, 'get_platform')
|
||||
@patch.object(zwave, 'discovery')
|
||||
def test_entity_discovery(self, discovery, get_platform):
|
||||
"""Test the creation of a new entity."""
|
||||
values = zwave.ZWaveDeviceEntityValues(
|
||||
hass=self.hass,
|
||||
schema=self.mock_schema,
|
||||
primary_value=self.primary,
|
||||
zwave_config=self.zwave_config,
|
||||
device_config=self.device_config,
|
||||
)
|
||||
|
||||
assert values.primary is self.primary
|
||||
assert len(list(values)) == 3
|
||||
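        # Sorting both sides by object id (presumably) makes this an
        # order-independent comparison of the three value slots.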
self.assertEqual(sorted(list(values),
|
||||
key=lambda a: id(a)),
|
||||
sorted([self.primary, None, None],
|
||||
key=lambda a: id(a)))
|
||||
assert not discovery.async_load_platform.called
|
||||
|
||||
values.check_value(self.secondary)
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert values.secondary is self.secondary
|
||||
assert len(list(values)) == 3
|
||||
self.assertEqual(sorted(list(values),
|
||||
key=lambda a: id(a)),
|
||||
sorted([self.primary, self.secondary, None],
|
||||
key=lambda a: id(a)))
|
||||
|
||||
assert discovery.async_load_platform.called
|
||||
# Second call is to async yield from
|
||||
assert len(discovery.async_load_platform.mock_calls) == 2
|
||||
args = discovery.async_load_platform.mock_calls[0][1]
|
||||
assert args[0] == self.hass
|
||||
assert args[1] == 'mock_component'
|
||||
assert args[2] == 'zwave'
|
||||
assert args[3] == {const.DISCOVERY_DEVICE: id(values)}
|
||||
assert args[4] == self.zwave_config
|
||||
|
||||
discovery.async_load_platform.reset_mock()
|
||||
values.check_value(self.optional)
|
||||
values.check_value(self.duplicate_secondary)
|
||||
values.check_value(self.no_match_value)
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert values.optional is self.optional
|
||||
assert len(list(values)) == 3
|
||||
self.assertEqual(sorted(list(values),
|
||||
key=lambda a: id(a)),
|
||||
sorted([self.primary, self.secondary, self.optional],
|
||||
key=lambda a: id(a)))
|
||||
assert not discovery.async_load_platform.called
|
||||
|
||||
assert values._entity.value_added.called
|
||||
assert len(values._entity.value_added.mock_calls) == 1
|
||||
assert values._entity.value_changed.called
|
||||
assert len(values._entity.value_changed.mock_calls) == 1
|
||||
|
||||
@patch.object(zwave, 'get_platform')
|
||||
@patch.object(zwave, 'discovery')
|
||||
def test_entity_existing_values(self, discovery, get_platform):
|
||||
"""Test the loading of already discovered values."""
|
||||
self.node.values = {
|
||||
self.primary.value_id: self.primary,
|
||||
self.secondary.value_id: self.secondary,
|
||||
self.optional.value_id: self.optional,
|
||||
self.no_match_value.value_id: self.no_match_value,
|
||||
}
|
||||
|
||||
values = zwave.ZWaveDeviceEntityValues(
|
||||
hass=self.hass,
|
||||
schema=self.mock_schema,
|
||||
primary_value=self.primary,
|
||||
zwave_config=self.zwave_config,
|
||||
device_config=self.device_config,
|
||||
)
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert values.primary is self.primary
|
||||
assert values.secondary is self.secondary
|
||||
assert values.optional is self.optional
|
||||
assert len(list(values)) == 3
|
||||
self.assertEqual(sorted(list(values),
|
||||
key=lambda a: id(a)),
|
||||
sorted([self.primary, self.secondary, self.optional],
|
||||
key=lambda a: id(a)))
|
||||
|
||||
assert discovery.async_load_platform.called
|
||||
# Second call is to async yield from
|
||||
assert len(discovery.async_load_platform.mock_calls) == 2
|
||||
args = discovery.async_load_platform.mock_calls[0][1]
|
||||
assert args[0] == self.hass
|
||||
assert args[1] == 'mock_component'
|
||||
assert args[2] == 'zwave'
|
||||
assert args[3] == {const.DISCOVERY_DEVICE: id(values)}
|
||||
assert args[4] == self.zwave_config
|
||||
assert not self.primary.enable_poll.called
|
||||
assert self.primary.disable_poll.called
|
||||
|
||||
@patch.object(zwave, 'get_platform')
|
||||
@patch.object(zwave, 'discovery')
|
||||
def test_node_schema_mismatch(self, discovery, get_platform):
|
||||
"""Test node schema mismatch."""
|
||||
self.node.generic = 'no_match'
|
||||
self.node.values = {
|
||||
self.primary.value_id: self.primary,
|
||||
self.secondary.value_id: self.secondary,
|
||||
}
|
||||
self.mock_schema[const.DISC_GENERIC_DEVICE_CLASS] = ['generic_match']
|
||||
values = zwave.ZWaveDeviceEntityValues(
|
||||
hass=self.hass,
|
||||
schema=self.mock_schema,
|
||||
primary_value=self.primary,
|
||||
zwave_config=self.zwave_config,
|
||||
device_config=self.device_config,
|
||||
)
|
||||
values._check_entity_ready()
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert not discovery.async_load_platform.called
|
||||
|
||||
@patch.object(zwave, 'get_platform')
|
||||
@patch.object(zwave, 'discovery')
|
||||
def test_entity_workaround_component(self, discovery, get_platform):
|
||||
"""Test ignore workaround."""
|
||||
self.node.manufacturer_id = '010f'
|
||||
self.node.product_type = '0b00'
|
||||
self.primary.command_class = const.COMMAND_CLASS_SENSOR_ALARM
|
||||
self.entity_id = '{}.{}'.format('binary_sensor',
|
||||
zwave.object_id(self.primary))
|
||||
self.device_config = {self.entity_id: {}}
|
||||
|
||||
self.mock_schema = {
|
||||
const.DISC_COMPONENT: 'mock_component',
|
||||
const.DISC_VALUES: {
|
||||
const.DISC_PRIMARY: {
|
||||
const.DISC_COMMAND_CLASS: [
|
||||
const.COMMAND_CLASS_SWITCH_BINARY],
|
||||
}}}
|
||||
|
||||
values = zwave.ZWaveDeviceEntityValues(
|
||||
hass=self.hass,
|
||||
schema=self.mock_schema,
|
||||
primary_value=self.primary,
|
||||
zwave_config=self.zwave_config,
|
||||
device_config=self.device_config,
|
||||
)
|
||||
values._check_entity_ready()
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert discovery.async_load_platform.called
|
||||
# Second call is to async yield from
|
||||
assert len(discovery.async_load_platform.mock_calls) == 2
|
||||
args = discovery.async_load_platform.mock_calls[0][1]
|
||||
assert args[1] == 'binary_sensor'
|
||||
|
||||
@patch.object(zwave, 'get_platform')
|
||||
@patch.object(zwave, 'discovery')
|
||||
def test_entity_workaround_ignore(self, discovery, get_platform):
|
||||
"""Test ignore workaround."""
|
||||
self.node.manufacturer_id = '010f'
|
||||
self.node.product_type = '0301'
|
||||
self.primary.command_class = const.COMMAND_CLASS_SWITCH_BINARY
|
||||
|
||||
self.mock_schema = {
|
||||
const.DISC_COMPONENT: 'mock_component',
|
||||
const.DISC_VALUES: {
|
||||
const.DISC_PRIMARY: {
|
||||
const.DISC_COMMAND_CLASS: [
|
||||
const.COMMAND_CLASS_SWITCH_BINARY],
|
||||
}}}
|
||||
|
||||
values = zwave.ZWaveDeviceEntityValues(
|
||||
hass=self.hass,
|
||||
schema=self.mock_schema,
|
||||
primary_value=self.primary,
|
||||
zwave_config=self.zwave_config,
|
||||
device_config=self.device_config,
|
||||
)
|
||||
values._check_entity_ready()
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert not discovery.async_load_platform.called
|
||||
|
||||
@patch.object(zwave, 'get_platform')
|
||||
@patch.object(zwave, 'discovery')
|
||||
def test_entity_config_ignore(self, discovery, get_platform):
|
||||
"""Test ignore config."""
|
||||
self.node.values = {
|
||||
self.primary.value_id: self.primary,
|
||||
self.secondary.value_id: self.secondary,
|
||||
}
|
||||
self.device_config = {self.entity_id: {
|
||||
zwave.CONF_IGNORED: True
|
||||
}}
|
||||
values = zwave.ZWaveDeviceEntityValues(
|
||||
hass=self.hass,
|
||||
schema=self.mock_schema,
|
||||
primary_value=self.primary,
|
||||
zwave_config=self.zwave_config,
|
||||
device_config=self.device_config,
|
||||
)
|
||||
values._check_entity_ready()
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert not discovery.async_load_platform.called
|
||||
|
||||
@patch.object(zwave, 'get_platform')
|
||||
@patch.object(zwave, 'discovery')
|
||||
def test_entity_platform_ignore(self, discovery, get_platform):
|
||||
"""Test platform ignore device."""
|
||||
self.node.values = {
|
||||
self.primary.value_id: self.primary,
|
||||
self.secondary.value_id: self.secondary,
|
||||
}
|
||||
platform = MagicMock()
|
||||
get_platform.return_value = platform
|
||||
platform.get_device.return_value = None
|
||||
zwave.ZWaveDeviceEntityValues(
|
||||
hass=self.hass,
|
||||
schema=self.mock_schema,
|
||||
primary_value=self.primary,
|
||||
zwave_config=self.zwave_config,
|
||||
device_config=self.device_config,
|
||||
)
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert not discovery.async_load_platform.called
|
||||
|
||||
@patch.object(zwave, 'get_platform')
|
||||
@patch.object(zwave, 'discovery')
|
||||
def test_config_polling_intensity(self, discovery, get_platform):
|
||||
"""Test polling intensity."""
|
||||
self.node.values = {
|
||||
self.primary.value_id: self.primary,
|
||||
self.secondary.value_id: self.secondary,
|
||||
}
|
||||
self.device_config = {self.entity_id: {
|
||||
zwave.CONF_POLLING_INTENSITY: 123,
|
||||
}}
|
||||
values = zwave.ZWaveDeviceEntityValues(
|
||||
hass=self.hass,
|
||||
schema=self.mock_schema,
|
||||
primary_value=self.primary,
|
||||
zwave_config=self.zwave_config,
|
||||
device_config=self.device_config,
|
||||
)
|
||||
values._check_entity_ready()
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert discovery.async_load_platform.called
|
||||
assert self.primary.enable_poll.called
|
||||
assert len(self.primary.enable_poll.mock_calls) == 1
|
||||
assert self.primary.enable_poll.mock_calls[0][1][0] == 123
|
||||
assert not self.primary.disable_poll.called
|
||||
|
||||
|
||||
class TestZwave(unittest.TestCase):
|
||||
"""Test zwave init."""
|
||||
|
||||
def test_device_config_glob_is_ordered(self):
|
||||
"""Test that device_config_glob preserves order."""
|
||||
conf = CONFIG_SCHEMA(
|
||||
{'zwave': {CONF_DEVICE_CONFIG_GLOB: OrderedDict()}})
|
||||
self.assertIsInstance(
|
||||
conf['zwave'][CONF_DEVICE_CONFIG_GLOB], OrderedDict)
|
||||
|
||||
|
||||
class TestZWaveServices(unittest.TestCase):
|
||||
"""Tests for zwave services."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def set_mock_openzwave(self, mock_openzwave):
|
||||
"""Use the mock_openzwave fixture for this class."""
|
||||
self.mock_openzwave = mock_openzwave
|
||||
|
||||
def setUp(self):
|
||||
"""Initialize values for this testcase class."""
|
||||
self.hass = get_test_home_assistant()
|
||||
self.hass.start()
|
||||
|
||||
# Initialize zwave
|
||||
setup_component(self.hass, 'zwave', {'zwave': {}})
|
||||
self.hass.block_till_done()
|
||||
zwave.NETWORK.state = MockNetwork.STATE_READY
|
||||
self.hass.bus.fire(EVENT_HOMEASSISTANT_START)
|
||||
self.hass.block_till_done()
|
||||
|
||||
def tearDown(self): # pylint: disable=invalid-name
|
||||
"""Stop everything that was started."""
|
||||
self.hass.stop()
|
||||
|
||||
def test_add_node(self):
|
||||
"""Test zwave add_node service."""
|
||||
self.hass.services.call('zwave', 'add_node', {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.controller.add_node.called
|
||||
assert len(zwave.NETWORK.controller.add_node.mock_calls) == 1
|
||||
assert len(zwave.NETWORK.controller.add_node.mock_calls[0][1]) == 0
|
||||
|
||||
def test_add_node_secure(self):
|
||||
"""Test zwave add_node_secure service."""
|
||||
self.hass.services.call('zwave', 'add_node_secure', {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.controller.add_node.called
|
||||
assert len(zwave.NETWORK.controller.add_node.mock_calls) == 1
|
||||
assert zwave.NETWORK.controller.add_node.mock_calls[0][1][0] is True
|
||||
|
||||
def test_remove_node(self):
|
||||
"""Test zwave remove_node service."""
|
||||
self.hass.services.call('zwave', 'remove_node', {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.controller.remove_node.called
|
||||
assert len(zwave.NETWORK.controller.remove_node.mock_calls) == 1
|
||||
|
||||
def test_cancel_command(self):
|
||||
"""Test zwave cancel_command service."""
|
||||
self.hass.services.call('zwave', 'cancel_command', {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.controller.cancel_command.called
|
||||
assert len(zwave.NETWORK.controller.cancel_command.mock_calls) == 1
|
||||
|
||||
def test_heal_network(self):
|
||||
"""Test zwave heal_network service."""
|
||||
self.hass.services.call('zwave', 'heal_network', {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.heal.called
|
||||
assert len(zwave.NETWORK.heal.mock_calls) == 1
|
||||
|
||||
def test_soft_reset(self):
|
||||
"""Test zwave soft_reset service."""
|
||||
self.hass.services.call('zwave', 'soft_reset', {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.controller.soft_reset.called
|
||||
assert len(zwave.NETWORK.controller.soft_reset.mock_calls) == 1
|
||||
|
||||
def test_test_network(self):
|
||||
"""Test zwave test_network service."""
|
||||
self.hass.services.call('zwave', 'test_network', {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.test.called
|
||||
assert len(zwave.NETWORK.test.mock_calls) == 1
|
||||
|
||||
def test_stop_network(self):
|
||||
"""Test zwave stop_network service."""
|
||||
with patch.object(self.hass.bus, 'fire') as mock_fire:
|
||||
self.hass.services.call('zwave', 'stop_network', {})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.stop.called
|
||||
assert len(zwave.NETWORK.stop.mock_calls) == 1
|
||||
assert mock_fire.called
|
||||
assert len(mock_fire.mock_calls) == 2
|
||||
assert mock_fire.mock_calls[0][1][0] == const.EVENT_NETWORK_STOP
|
||||
|
||||
def test_rename_node(self):
|
||||
"""Test zwave rename_node service."""
|
||||
zwave.NETWORK.nodes = {11: MagicMock()}
|
||||
self.hass.services.call('zwave', 'rename_node', {
|
||||
const.ATTR_NODE_ID: 11,
|
||||
const.ATTR_NAME: 'test_name',
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert zwave.NETWORK.nodes[11].name == 'test_name'
|
||||
|
||||
def test_remove_failed_node(self):
|
||||
"""Test zwave remove_failed_node service."""
|
||||
self.hass.services.call('zwave', 'remove_failed_node', {
|
||||
const.ATTR_NODE_ID: 12,
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
remove_failed_node = zwave.NETWORK.controller.remove_failed_node
|
||||
assert remove_failed_node.called
|
||||
assert len(remove_failed_node.mock_calls) == 1
|
||||
assert remove_failed_node.mock_calls[0][1][0] == 12
|
||||
|
||||
def test_replace_failed_node(self):
|
||||
"""Test zwave replace_failed_node service."""
|
||||
self.hass.services.call('zwave', 'replace_failed_node', {
|
||||
const.ATTR_NODE_ID: 13,
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
replace_failed_node = zwave.NETWORK.controller.replace_failed_node
|
||||
assert replace_failed_node.called
|
||||
assert len(replace_failed_node.mock_calls) == 1
|
||||
assert replace_failed_node.mock_calls[0][1][0] == 13
|
||||
|
||||
def test_set_config_parameter(self):
|
||||
"""Test zwave set_config_parameter service."""
|
||||
value = MockValue(
|
||||
index=12,
|
||||
command_class=const.COMMAND_CLASS_CONFIGURATION,
|
||||
)
|
||||
value_list = MockValue(
|
||||
index=13,
|
||||
command_class=const.COMMAND_CLASS_CONFIGURATION,
|
||||
type=const.TYPE_LIST,
|
||||
data_items=['item1', 'item2', 'item3'],
|
||||
)
|
||||
node = MockNode(node_id=14)
|
||||
node.get_values.return_value = {12: value, 13: value_list}
|
||||
zwave.NETWORK.nodes = {14: node}
|
||||
|
||||
self.hass.services.call('zwave', 'set_config_parameter', {
|
||||
const.ATTR_NODE_ID: 14,
|
||||
const.ATTR_CONFIG_PARAMETER: 13,
|
||||
const.ATTR_CONFIG_VALUE: 1,
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert node.set_config_param.called
|
||||
assert len(node.set_config_param.mock_calls) == 1
|
||||
assert node.set_config_param.mock_calls[0][1][0] == 13
|
||||
assert node.set_config_param.mock_calls[0][1][1] == 1
|
||||
assert node.set_config_param.mock_calls[0][1][2] == 2
|
||||
node.set_config_param.reset_mock()
|
||||
|
||||
self.hass.services.call('zwave', 'set_config_parameter', {
|
||||
const.ATTR_NODE_ID: 14,
|
||||
const.ATTR_CONFIG_PARAMETER: 13,
|
||||
const.ATTR_CONFIG_VALUE: 7,
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert not node.set_config_param.called
|
||||
node.set_config_param.reset_mock()
|
||||
|
||||
self.hass.services.call('zwave', 'set_config_parameter', {
|
||||
const.ATTR_NODE_ID: 14,
|
||||
const.ATTR_CONFIG_PARAMETER: 12,
|
||||
const.ATTR_CONFIG_VALUE: 0x01020304,
|
||||
const.ATTR_CONFIG_SIZE: 4,
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert node.set_config_param.called
|
||||
assert len(node.set_config_param.mock_calls) == 1
|
||||
assert node.set_config_param.mock_calls[0][1][0] == 12
|
||||
assert node.set_config_param.mock_calls[0][1][1] == 0x01020304
|
||||
assert node.set_config_param.mock_calls[0][1][2] == 4
|
||||
node.set_config_param.reset_mock()
|
||||
|
||||
def test_print_config_parameter(self):
|
||||
"""Test zwave print_config_parameter service."""
|
||||
value1 = MockValue(
|
||||
index=12,
|
||||
command_class=const.COMMAND_CLASS_CONFIGURATION,
|
||||
data=1234,
|
||||
)
|
||||
value2 = MockValue(
|
||||
index=13,
|
||||
command_class=const.COMMAND_CLASS_CONFIGURATION,
|
||||
data=2345,
|
||||
)
|
||||
node = MockNode(node_id=14)
|
||||
node.values = {12: value1, 13: value2}
|
||||
zwave.NETWORK.nodes = {14: node}
|
||||
|
||||
with patch.object(zwave, '_LOGGER') as mock_logger:
|
||||
self.hass.services.call('zwave', 'print_config_parameter', {
|
||||
const.ATTR_NODE_ID: 14,
|
||||
const.ATTR_CONFIG_PARAMETER: 13,
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert mock_logger.info.called
|
||||
assert len(mock_logger.info.mock_calls) == 1
|
||||
assert mock_logger.info.mock_calls[0][1][1] == 13
|
||||
assert mock_logger.info.mock_calls[0][1][2] == 14
|
||||
assert mock_logger.info.mock_calls[0][1][3] == 2345
|
||||
|
||||
def test_print_node(self):
|
||||
"""Test zwave print_config_parameter service."""
|
||||
node1 = MockNode(node_id=14)
|
||||
node2 = MockNode(node_id=15)
|
||||
zwave.NETWORK.nodes = {14: node1, 15: node2}
|
||||
|
||||
with patch.object(zwave, 'pprint') as mock_pprint:
|
||||
self.hass.services.call('zwave', 'print_node', {
|
||||
const.ATTR_NODE_ID: 15,
|
||||
})
|
||||
self.hass.block_till_done()
|
||||
|
||||
assert mock_pprint.called
|
||||
assert len(mock_pprint.mock_calls) == 1
|
||||
assert mock_pprint.mock_calls[0][1][0]['node_id'] == 15
|
||||
|
||||
    def test_set_wakeup(self):
        """Test zwave set_wakeup service."""
        value = MockValue(
            index=12,
            command_class=const.COMMAND_CLASS_WAKE_UP,
        )
        node = MockNode(node_id=14)
        node.values = {12: value}
        node.get_values.return_value = node.values
        zwave.NETWORK.nodes = {14: node}

        self.hass.services.call('zwave', 'set_wakeup', {
            const.ATTR_NODE_ID: 14,
            const.ATTR_CONFIG_VALUE: 15,
        })
        self.hass.block_till_done()

        assert value.data == 15

        node.can_wake_up_value = False
        self.hass.services.call('zwave', 'set_wakeup', {
            const.ATTR_NODE_ID: 14,
            const.ATTR_CONFIG_VALUE: 20,
        })
        self.hass.block_till_done()

        assert value.data == 15

    def test_add_association(self):
        """Test zwave change_association service."""
        ZWaveGroup = self.mock_openzwave.group.ZWaveGroup
        group = MagicMock()
        ZWaveGroup.return_value = group

        value = MockValue(
            index=12,
            command_class=const.COMMAND_CLASS_WAKE_UP,
        )
        node = MockNode(node_id=14)
        node.values = {12: value}
        node.get_values.return_value = node.values
        zwave.NETWORK.nodes = {14: node}

        self.hass.services.call('zwave', 'change_association', {
            const.ATTR_ASSOCIATION: 'add',
            const.ATTR_NODE_ID: 14,
            const.ATTR_TARGET_NODE_ID: 24,
            const.ATTR_GROUP: 3,
            const.ATTR_INSTANCE: 5,
        })
        self.hass.block_till_done()

        assert ZWaveGroup.called
        assert len(ZWaveGroup.mock_calls) == 2
        assert ZWaveGroup.mock_calls[0][1][0] == 3
        assert ZWaveGroup.mock_calls[0][1][2] == 14
        assert group.add_association.called
        assert len(group.add_association.mock_calls) == 1
        assert group.add_association.mock_calls[0][1][0] == 24
        assert group.add_association.mock_calls[0][1][1] == 5

    def test_remove_association(self):
        """Test zwave change_association service."""
        ZWaveGroup = self.mock_openzwave.group.ZWaveGroup
        group = MagicMock()
        ZWaveGroup.return_value = group

        value = MockValue(
            index=12,
            command_class=const.COMMAND_CLASS_WAKE_UP,
        )
        node = MockNode(node_id=14)
        node.values = {12: value}
        node.get_values.return_value = node.values
        zwave.NETWORK.nodes = {14: node}

        self.hass.services.call('zwave', 'change_association', {
            const.ATTR_ASSOCIATION: 'remove',
            const.ATTR_NODE_ID: 14,
            const.ATTR_TARGET_NODE_ID: 24,
            const.ATTR_GROUP: 3,
            const.ATTR_INSTANCE: 5,
        })
        self.hass.block_till_done()

        assert ZWaveGroup.called
        assert len(ZWaveGroup.mock_calls) == 2
        assert ZWaveGroup.mock_calls[0][1][0] == 3
        assert ZWaveGroup.mock_calls[0][1][2] == 14
        assert group.remove_association.called
        assert len(group.remove_association.mock_calls) == 1
        assert group.remove_association.mock_calls[0][1][0] == 24
        assert group.remove_association.mock_calls[0][1][1] == 5

    def test_refresh_entity(self):
        """Test zwave refresh_entity service."""
        node = MockNode()
        value = MockValue(data=False, node=node,
                          command_class=const.COMMAND_CLASS_SENSOR_BINARY)
        power_value = MockValue(data=50, node=node,
                                command_class=const.COMMAND_CLASS_METER)
        values = MockEntityValues(primary=value, power=power_value)
        device = get_device(node=node, values=values, node_config={})
        device.hass = self.hass
        device.entity_id = 'binary_sensor.mock_entity_id'
        self.hass.add_job(device.async_added_to_hass())
        self.hass.block_till_done()

        self.hass.services.call('zwave', 'refresh_entity', {
            ATTR_ENTITY_ID: 'binary_sensor.mock_entity_id',
        })
        self.hass.block_till_done()

        assert node.refresh_value.called
        assert len(node.refresh_value.mock_calls) == 2
        self.assertEqual(sorted([node.refresh_value.mock_calls[0][1][0],
                                 node.refresh_value.mock_calls[1][1][0]]),
                         sorted([value.value_id, power_value.value_id]))

    def test_refresh_node(self):
        """Test zwave refresh_node service."""
        node = MockNode(node_id=14)
        zwave.NETWORK.nodes = {14: node}
        self.hass.services.call('zwave', 'refresh_node', {
            const.ATTR_NODE_ID: 14,
        })
        self.hass.block_till_done()

        assert node.refresh_info.called
        assert len(node.refresh_info.mock_calls) == 1

@@ -1,49 +1,33 @@
"""Test Z-Wave node entity."""
import asyncio
import unittest
from unittest.mock import patch, Mock
from tests.common import get_test_home_assistant
from unittest.mock import patch
import tests.mock.zwave as mock_zwave
import pytest
from homeassistant.components.zwave import node_entity


@pytest.mark.usefixtures('mock_openzwave')
class TestZWaveBaseEntity(unittest.TestCase):
    """Class to test ZWaveBaseEntity."""
@asyncio.coroutine
def test_maybe_schedule_update(hass, mock_openzwave):
    """Test maybe schedule update."""
    base_entity = node_entity.ZWaveBaseEntity()
    base_entity.hass = hass

    def setUp(self):
        """Initialize values for this testcase class."""
        self.hass = get_test_home_assistant()
    with patch.object(hass.loop, 'call_later') as mock_call_later:
        base_entity._schedule_update()
        assert mock_call_later.called

        def call_soon(time, func, *args):
            """Replace call_later by call_soon."""
            return self.hass.loop.call_soon(func, *args)
        base_entity._schedule_update()
        assert len(mock_call_later.mock_calls) == 1

        self.hass.loop.call_later = call_soon
        self.base_entity = node_entity.ZWaveBaseEntity()
        self.base_entity.hass = self.hass
        self.hass.start()
        do_update = mock_call_later.mock_calls[0][1][1]

    def tearDown(self): # pylint: disable=invalid-name
        """Stop everything that was started."""
        self.hass.stop()
        with patch.object(hass, 'async_add_job') as mock_add_job:
            do_update()
            assert mock_add_job.called

    def test_maybe_schedule_update(self):
        """Test maybe_schedule_update."""
        with patch.object(self.base_entity, 'async_update_ha_state',
                          Mock()) as mock_update:
            self.base_entity.maybe_schedule_update()
            self.hass.block_till_done()
            mock_update.assert_called_once_with()

    def test_maybe_schedule_update_called_twice(self):
        """Test maybe_schedule_update called twice."""
        with patch.object(self.base_entity, 'async_update_ha_state',
                          Mock()) as mock_update:
            self.base_entity.maybe_schedule_update()
            self.base_entity.maybe_schedule_update()
            self.hass.block_till_done()
            mock_update.assert_called_once_with()
        base_entity._schedule_update()
        assert len(mock_call_later.mock_calls) == 2


@pytest.mark.usefixtures('mock_openzwave')

@@ -12,9 +12,9 @@ from homeassistant import util, setup
from homeassistant.util import location
from homeassistant.components import mqtt

from .common import async_test_home_assistant, mock_coro
from .test_util.aiohttp import mock_aiohttp_client
from .mock.zwave import SIGNAL_VALUE_CHANGED, SIGNAL_NODE, SIGNAL_NOTIFICATION
from tests.common import async_test_home_assistant, mock_coro
from tests.test_util.aiohttp import mock_aiohttp_client
from tests.mock.zwave import MockNetwork

if os.environ.get('UVLOOP') == '1':
    import uvloop
@@ -100,9 +100,7 @@ def mock_openzwave():
    base_mock = MagicMock()
    libopenzwave = base_mock.libopenzwave
    libopenzwave.__file__ = 'test'
    base_mock.network.ZWaveNetwork.SIGNAL_VALUE_CHANGED = SIGNAL_VALUE_CHANGED
    base_mock.network.ZWaveNetwork.SIGNAL_NODE = SIGNAL_NODE
    base_mock.network.ZWaveNetwork.SIGNAL_NOTIFICATION = SIGNAL_NOTIFICATION
    base_mock.network.ZWaveNetwork = MockNetwork

    with patch.dict('sys.modules', {
        'libopenzwave': libopenzwave,

@@ -3,15 +3,11 @@ from unittest.mock import MagicMock

from pydispatch import dispatcher

SIGNAL_VALUE_CHANGED = 'mock_value_changed'
SIGNAL_NODE = 'mock_node'
SIGNAL_NOTIFICATION = 'mock_notification'


def value_changed(value):
    """Fire a value changed."""
    dispatcher.send(
        SIGNAL_VALUE_CHANGED,
        MockNetwork.SIGNAL_VALUE_CHANGED,
        value=value,
        node=value.node,
        network=value.node._network
@@ -21,7 +17,7 @@ def value_changed(value):
def node_changed(node):
    """Fire a node changed."""
    dispatcher.send(
        SIGNAL_NODE,
        MockNetwork.SIGNAL_NODE,
        node=node,
        network=node._network
    )
@@ -30,12 +26,70 @@ def node_changed(node):
def notification(node_id, network=None):
    """Fire a notification."""
    dispatcher.send(
        SIGNAL_NOTIFICATION,
        MockNetwork.SIGNAL_NOTIFICATION,
        args={'nodeId': node_id},
        network=network
    )


class MockNetwork(MagicMock):
    """Mock Z-Wave network."""

    SIGNAL_NETWORK_FAILED = 'mock_NetworkFailed'
    SIGNAL_NETWORK_STARTED = 'mock_NetworkStarted'
    SIGNAL_NETWORK_READY = 'mock_NetworkReady'
    SIGNAL_NETWORK_STOPPED = 'mock_NetworkStopped'
    SIGNAL_NETWORK_RESETTED = 'mock_DriverResetted'
    SIGNAL_NETWORK_AWAKED = 'mock_DriverAwaked'
    SIGNAL_DRIVER_FAILED = 'mock_DriverFailed'
    SIGNAL_DRIVER_READY = 'mock_DriverReady'
    SIGNAL_DRIVER_RESET = 'mock_DriverReset'
    SIGNAL_DRIVER_REMOVED = 'mock_DriverRemoved'
    SIGNAL_GROUP = 'mock_Group'
    SIGNAL_NODE = 'mock_Node'
    SIGNAL_NODE_ADDED = 'mock_NodeAdded'
    SIGNAL_NODE_EVENT = 'mock_NodeEvent'
    SIGNAL_NODE_NAMING = 'mock_NodeNaming'
    SIGNAL_NODE_NEW = 'mock_NodeNew'
    SIGNAL_NODE_PROTOCOL_INFO = 'mock_NodeProtocolInfo'
    SIGNAL_NODE_READY = 'mock_NodeReady'
    SIGNAL_NODE_REMOVED = 'mock_NodeRemoved'
    SIGNAL_SCENE_EVENT = 'mock_SceneEvent'
    SIGNAL_VALUE = 'mock_Value'
    SIGNAL_VALUE_ADDED = 'mock_ValueAdded'
    SIGNAL_VALUE_CHANGED = 'mock_ValueChanged'
    SIGNAL_VALUE_REFRESHED = 'mock_ValueRefreshed'
    SIGNAL_VALUE_REMOVED = 'mock_ValueRemoved'
    SIGNAL_POLLING_ENABLED = 'mock_PollingEnabled'
    SIGNAL_POLLING_DISABLED = 'mock_PollingDisabled'
    SIGNAL_CREATE_BUTTON = 'mock_CreateButton'
    SIGNAL_DELETE_BUTTON = 'mock_DeleteButton'
    SIGNAL_BUTTON_ON = 'mock_ButtonOn'
    SIGNAL_BUTTON_OFF = 'mock_ButtonOff'
    SIGNAL_ESSENTIAL_NODE_QUERIES_COMPLETE = \
        'mock_EssentialNodeQueriesComplete'
    SIGNAL_NODE_QUERIES_COMPLETE = 'mock_NodeQueriesComplete'
    SIGNAL_AWAKE_NODES_QUERIED = 'mock_AwakeNodesQueried'
    SIGNAL_ALL_NODES_QUERIED = 'mock_AllNodesQueried'
    SIGNAL_ALL_NODES_QUERIED_SOME_DEAD = 'mock_AllNodesQueriedSomeDead'
    SIGNAL_MSG_COMPLETE = 'mock_MsgComplete'
    SIGNAL_NOTIFICATION = 'mock_Notification'
    SIGNAL_CONTROLLER_COMMAND = 'mock_ControllerCommand'
    SIGNAL_CONTROLLER_WAITING = 'mock_ControllerWaiting'

    STATE_STOPPED = 0
    STATE_FAILED = 1
    STATE_RESETTED = 3
    STATE_STARTED = 5
    STATE_AWAKED = 7
    STATE_READY = 10

    def __init__(self, *args, **kwargs):
        """Initialize a Z-Wave mock network."""
        super().__init__()
        self.state = MockNetwork.STATE_STOPPED


class MockNode(MagicMock):
    """Mock Z-Wave node."""

@@ -47,6 +101,7 @@ class MockNode(MagicMock):
                 product_type='678',
                 command_classes=None,
                 can_wake_up_value=True,
                 network=None,
                 **kwargs):
        """Initialize a Z-Wave mock node."""
        super().__init__()
@@ -57,6 +112,8 @@ class MockNode(MagicMock):
        self.product_type = product_type
        self.can_wake_up_value = can_wake_up_value
        self._command_classes = command_classes or []
        if network is not None:
            self._network = network
        for attr_name in kwargs:
            setattr(self, attr_name, kwargs[attr_name])

@@ -84,30 +141,23 @@ class MockValue(MagicMock):

    def __init__(self, *,
                 label='Mock Value',
                 data=None,
                 data_items=None,
                 node=None,
                 instance=0,
                 index=0,
                 command_class=None,
                 units=None,
                 type=None,
                 value_id=None):
                 value_id=None,
                 **kwargs):
        """Initialize a Z-Wave mock value."""
        super().__init__()
        self.label = label
        self.data = data
        self.data_items = data_items
        self.node = node
        self.instance = instance
        self.index = index
        self.command_class = command_class
        self.units = units
        self.type = type
        if value_id is None:
            MockValue._mock_value_id += 1
            value_id = MockValue._mock_value_id
        self.value_id = value_id
        for attr_name in kwargs:
            setattr(self, attr_name, kwargs[attr_name])

    def _get_child_mock(self, **kw):
        """Create child mocks with right MagicMock class."""

@@ -3,6 +3,7 @@
import os
import unittest
import unittest.mock as mock
from collections import OrderedDict

import pytest
from voluptuous import MultipleInvalid
@@ -205,6 +206,12 @@ class TestConfig(unittest.TestCase):
            },
        })

    def test_customize_glob_is_ordered(self):
        """Test that customize_glob preserves order."""
        conf = config_util.CORE_CONFIG_SCHEMA(
            {'customize_glob': OrderedDict()})
        self.assertIsInstance(conf['customize_glob'], OrderedDict)

    def _compute_state(self, config):
        run_coroutine_threadsafe(
            config_util.async_process_ha_core_config(self.hass, config),

@@ -5,6 +5,7 @@ import unittest
from unittest.mock import patch, MagicMock, sentinel
from datetime import datetime, timedelta

import logging
import pytz
import pytest

@@ -16,8 +17,7 @@ from homeassistant.util.unit_system import (METRIC_SYSTEM)
from homeassistant.const import (
    __version__, EVENT_STATE_CHANGED, ATTR_FRIENDLY_NAME, CONF_UNIT_SYSTEM,
    ATTR_NOW, EVENT_TIME_CHANGED, EVENT_HOMEASSISTANT_STOP,
    EVENT_HOMEASSISTANT_CLOSE, EVENT_HOMEASSISTANT_START,
    EVENT_SERVICE_REGISTERED, EVENT_SERVICE_REMOVED)
    EVENT_HOMEASSISTANT_CLOSE, EVENT_SERVICE_REGISTERED, EVENT_SERVICE_REMOVED)

from tests.common import get_test_home_assistant

@@ -813,28 +813,21 @@ def test_create_timer(mock_monotonic, loop):
        funcs.append(func)
        return orig_callback(func)

    with patch.object(ha, 'callback', mock_callback):
        ha._async_create_timer(hass)

    assert len(funcs) == 3
    fire_time_event, start_timer, stop_timer = funcs

    assert len(hass.bus.async_listen_once.mock_calls) == 1
    event_type, callback = hass.bus.async_listen_once.mock_calls[0][1]
    assert event_type == EVENT_HOMEASSISTANT_START
    assert callback is start_timer

    mock_monotonic.side_effect = 10.2, 10.3

    with patch('homeassistant.core.dt_util.utcnow',
               return_value=sentinel.mock_date):
        start_timer(None)
    with patch.object(ha, 'callback', mock_callback), \
            patch('homeassistant.core.dt_util.utcnow',
                  return_value=sentinel.mock_date):
        ha._async_create_timer(hass)

    assert len(hass.bus.async_listen_once.mock_calls) == 2
    assert len(funcs) == 2
    fire_time_event, stop_timer = funcs

    assert len(hass.bus.async_listen_once.mock_calls) == 1
    assert len(hass.bus.async_fire.mock_calls) == 1
    assert len(hass.loop.call_later.mock_calls) == 1

    event_type, callback = hass.bus.async_listen_once.mock_calls[1][1]
    event_type, callback = hass.bus.async_listen_once.mock_calls[0][1]
    assert event_type == EVENT_HOMEASSISTANT_STOP
    assert callback is stop_timer

@@ -859,17 +852,15 @@ def test_timer_out_of_sync(mock_monotonic, loop):
        funcs.append(func)
        return orig_callback(func)

    with patch.object(ha, 'callback', mock_callback):
        ha._async_create_timer(hass)

    assert len(funcs) == 3
    fire_time_event, start_timer, stop_timer = funcs

    mock_monotonic.side_effect = 10.2, 11.3, 11.3

    with patch('homeassistant.core.dt_util.utcnow',
               return_value=sentinel.mock_date):
        start_timer(None)
    with patch.object(ha, 'callback', mock_callback), \
            patch('homeassistant.core.dt_util.utcnow',
                  return_value=sentinel.mock_date):
        ha._async_create_timer(hass)

    assert len(funcs) == 2
    fire_time_event, stop_timer = funcs

    assert len(hass.loop.call_later.mock_calls) == 1

@@ -877,3 +868,45 @@ def test_timer_out_of_sync(mock_monotonic, loop):
    assert slp_seconds == 1
    assert callback is fire_time_event
    assert abs(nxt - 12.3) < 0.001


@asyncio.coroutine
def test_hass_start_starts_the_timer(loop):
    """Test when hass starts, it starts the timer."""
    hass = ha.HomeAssistant(loop=loop)

    try:
        with patch('homeassistant.core._async_create_timer') as mock_timer:
            yield from hass.async_start()

        assert hass.state == ha.CoreState.running
        assert not hass._track_task
        assert len(mock_timer.mock_calls) == 1
        assert mock_timer.mock_calls[0][1][0] is hass

    finally:
        yield from hass.async_stop()
        assert hass.state == ha.CoreState.not_running


@asyncio.coroutine
def test_start_taking_too_long(loop, caplog):
    """Test when async_start takes too long."""
    hass = ha.HomeAssistant(loop=loop)
    caplog.set_level(logging.WARNING)

    try:
        with patch('homeassistant.core.timeout',
                   side_effect=asyncio.TimeoutError), \
                patch('homeassistant.core._async_create_timer') as mock_timer:
            yield from hass.async_start()

        assert not hass._track_task
        assert hass.state == ha.CoreState.running
        assert len(mock_timer.mock_calls) == 1
        assert mock_timer.mock_calls[0][1][0] is hass
        assert 'Something is blocking Home Assistant' in caplog.text

    finally:
        yield from hass.async_stop()
        assert hass.state == ha.CoreState.not_running

@@ -1,9 +1,6 @@
"""Test Home Assistant remote methods and classes."""
# pylint: disable=protected-access
import asyncio
import threading
import unittest
from unittest.mock import patch

from homeassistant import remote, setup, core as ha
import homeassistant.components.http as http
@@ -11,18 +8,17 @@ from homeassistant.const import HTTP_HEADER_HA_AUTH, EVENT_STATE_CHANGED
import homeassistant.util.dt as dt_util

from tests.common import (
    get_test_instance_port, get_test_home_assistant, get_test_config_dir)
    get_test_instance_port, get_test_home_assistant)

API_PASSWORD = 'test1234'
MASTER_PORT = get_test_instance_port()
SLAVE_PORT = get_test_instance_port()
BROKEN_PORT = get_test_instance_port()
HTTP_BASE_URL = 'http://127.0.0.1:{}'.format(MASTER_PORT)

HA_HEADERS = {HTTP_HEADER_HA_AUTH: API_PASSWORD}

broken_api = remote.API('127.0.0.1', "bladybla", port=get_test_instance_port())
hass, slave, master_api = None, None, None
hass, master_api = None, None


def _url(path=''):
@@ -32,8 +28,8 @@ def _url(path=''):

# pylint: disable=invalid-name
def setUpModule():
    """Initalization of a Home Assistant server and Slave instance."""
    global hass, slave, master_api
    """Initalization of a Home Assistant server instance."""
    global hass, master_api

    hass = get_test_home_assistant()

@@ -51,30 +47,10 @@ def setUpModule():

    master_api = remote.API('127.0.0.1', API_PASSWORD, MASTER_PORT)

    # Start slave
    loop = asyncio.new_event_loop()

    # FIXME: should not be a daemon
    threading.Thread(name='SlaveThread', daemon=True,
                     target=loop.run_forever).start()

    slave = remote.HomeAssistant(master_api, loop=loop)
    slave.async_track_tasks()
    slave.config.config_dir = get_test_config_dir()
    slave.config.skip_pip = True
    setup.setup_component(
        slave, http.DOMAIN,
        {http.DOMAIN: {http.CONF_API_PASSWORD: API_PASSWORD,
                       http.CONF_SERVER_PORT: SLAVE_PORT}})

    with patch.object(ha, '_async_create_timer', return_value=None):
        slave.start()


# pylint: disable=invalid-name
def tearDownModule():
    """Stop the Home Assistant server and slave."""
    slave.stop()
    """Stop the Home Assistant server."""
    hass.stop()


@@ -83,7 +59,6 @@ class TestRemoteMethods(unittest.TestCase):

    def tearDown(self):
        """Stop everything that was started."""
        slave.block_till_done()
        hass.block_till_done()

    def test_validate_api(self):
@@ -228,89 +203,3 @@ class TestRemoteMethods(unittest.TestCase):

        now = dt_util.utcnow()
        self.assertEqual(now.isoformat(), ha_json_enc.default(now))


class TestRemoteClasses(unittest.TestCase):
    """Test the homeassistant.remote module."""

    def tearDown(self):
        """Stop everything that was started."""
        slave.block_till_done()
        hass.block_till_done()

    def test_home_assistant_init(self):
        """Test HomeAssistant init."""
        # Wrong password
        self.assertRaises(
            ha.HomeAssistantError, remote.HomeAssistant,
            remote.API('127.0.0.1', API_PASSWORD + 'A', 8124))

        # Wrong port
        self.assertRaises(
            ha.HomeAssistantError, remote.HomeAssistant,
            remote.API('127.0.0.1', API_PASSWORD, BROKEN_PORT))

    def test_statemachine_init(self):
        """Test if remote.StateMachine copies all states on init."""
        self.assertEqual(sorted(hass.states.all()),
                         sorted(slave.states.all()))

    def test_statemachine_set(self):
        """Test if setting the state on a slave is recorded."""
        slave.states.set("remote.test", "remote.statemachine test")

        # Wait till slave tells master
        slave.block_till_done()
        # Wait till master gives updated state
        hass.block_till_done()

        self.assertEqual("remote.statemachine test",
                         slave.states.get("remote.test").state)

    def test_statemachine_remove_from_master(self):
        """Remove statemachine from master."""
        hass.states.set("remote.master_remove", "remove me!")
        hass.block_till_done()
        slave.block_till_done()

        self.assertIn('remote.master_remove', slave.states.entity_ids())

        hass.states.remove("remote.master_remove")
        hass.block_till_done()
        slave.block_till_done()

        self.assertNotIn('remote.master_remove', slave.states.entity_ids())

    def test_statemachine_remove_from_slave(self):
        """Remove statemachine from slave."""
        hass.states.set("remote.slave_remove", "remove me!")
        hass.block_till_done()

        self.assertIn('remote.slave_remove', slave.states.entity_ids())

        self.assertTrue(slave.states.remove("remote.slave_remove"))
        slave.block_till_done()
        hass.block_till_done()

        self.assertNotIn('remote.slave_remove', slave.states.entity_ids())

    def test_eventbus_fire(self):
        """Test if events fired from the eventbus get fired."""
        hass_call = []
        slave_call = []

        hass.bus.listen("test.event_no_data", lambda _: hass_call.append(1))
        slave.bus.listen("test.event_no_data", lambda _: slave_call.append(1))
        slave.bus.fire("test.event_no_data")

        # Wait till slave tells master
        slave.block_till_done()
        # Wait till master gives updated event
        hass.block_till_done()

        self.assertEqual(1, len(hass_call))
        self.assertEqual(1, len(slave_call))

    def test_get_config(self):
        """Test the return of the configuration."""
        self.assertEqual(hass.config.as_dict(), remote.get_config(master_api))

@@ -75,8 +75,10 @@ class AiohttpClientMocker:
    @asyncio.coroutine
    # pylint: disable=unused-variable
    def match_request(self, method, url, *, data=None, auth=None, params=None,
                      headers=None, allow_redirects=None):
                      headers=None, allow_redirects=None, timeout=None,
                      json=None):
        """Match a request against pre-registered requests."""
        data = data or json
        for response in self._mocks:
            if response.match_request(method, url, params):
                self.mock_calls.append((method, url, data))

@@ -31,6 +31,12 @@ class TestUtil(unittest.TestCase):
        self.assertEqual("test_more", util.slugify("Test More"))
        self.assertEqual("test_more", util.slugify("Test_(More)"))
        self.assertEqual("test_more", util.slugify("Tèst_Mörê"))
        self.assertEqual("b827eb000000", util.slugify("B8:27:EB:00:00:00"))
        self.assertEqual("testcom", util.slugify("test.com"))
        self.assertEqual("greg_phone__exp_wayp1",
                         util.slugify("greg_phone - exp_wayp1"))
        self.assertEqual("we_are_we_are_a_test_calendar",
                         util.slugify("We are, we are, a... Test Calendar"))

    def test_repr_helper(self):
        """Test repr_helper."""