forked from home-assistant/core
Compare commits
22 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 22f68d70a7 | |
| | bf85e18d45 | |
| | 09c43e8854 | |
| | e5cbf01ce1 | |
| | fe2e5089ab | |
| | 35ffac1e01 | |
| | 362f23a950 | |
| | dc8d4ac8e4 | |
| | 0cdea28e2a | |
| | 7d1a02feb1 | |
| | 958b894020 | |
| | 5c8f209aa7 | |
| | 3eeccc1a65 | |
| | 52e33c2aa2 | |
| | 35f5784287 | |
| | 46cc6e199b | |
| | 6371eca14d | |
| | 052641e620 | |
| | 16edcd9938 | |
| | 4fa6f2e54f | |
| | 9be1b72ed7 | |
| | bfc8d2457c | |
@@ -16,6 +16,7 @@ variables:
- group: docker
- group: wheels
- group: github
- group: twine

jobs:

@@ -24,7 +25,7 @@ jobs:
condition: eq(variables['Build.SourceBranchName'], 'dev')
timeoutInMinutes: 360
pool:
vmImage: 'ubuntu-16.04'
vmImage: 'ubuntu-latest'
strategy:
maxParallel: 3
matrix:

@@ -114,15 +115,53 @@ jobs:
exit 1
fi
displayName: 'Check version of branch/tag'
- script: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
jq curl

release="$(Build.SourceBranchName)"
created_by="$(curl -s https://api.github.com/repos/home-assistant/home-assistant/releases/tags/${release} | jq --raw-output '.author.login')"

if [[ "${created_by}" =~ ^(balloob|pvizeli|fabaff|robbiet480)$ ]]; then
exit 0
fi

echo "${created_by} is not allowed to create an release!"
exit 1
displayName: 'Check rights'


- job: 'Release'
- job: 'ReleasePython'
condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('VersionValidate'))
dependsOn:
- 'VersionValidate'
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python 3.7'
inputs:
versionSpec: '3.7'
- script: pip install twine wheel
displayName: 'Install tools'
- script: python setup.py sdist bdist_wheel
displayName: 'Build package'
- script: |
export TWINE_USERNAME="$(twineUser)"
export TWINE_PASSWORD="$(twinePassword)"

twine upload dist/* --skip-existing
displayName: 'Upload pypi'


- job: 'ReleaseDocker'
condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('VersionValidate'))
dependsOn:
- 'VersionValidate'
timeoutInMinutes: 120
pool:
vmImage: 'ubuntu-16.04'
vmImage: 'ubuntu-latest'
strategy:
maxParallel: 5
matrix:

@@ -167,16 +206,17 @@ jobs:
displayName: 'Build Release'


- job: 'ReleasePublish'
condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('Release'))
- job: 'ReleaseHassio'
condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('ReleaseDocker'))
dependsOn:
- 'Release'
- 'ReleaseDocker'
pool:
vmImage: 'ubuntu-16.04'
vmImage: 'ubuntu-latest'
steps:
- script: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
git jq
git jq curl

git config --global user.name "Pascal Vizeli"
git config --global user.email "pvizeli@syshack.ch"
@@ -155,6 +155,13 @@ class AxisFlowHandler(config_entries.ConfigFlow):
return self.async_abort(reason='link_local_address')

serialnumber = discovery_info['properties']['macaddress']
# pylint: disable=unsupported-assignment-operation
self.context['macaddress'] = serialnumber

if any(serialnumber == flow['context']['macaddress']
for flow in self._async_in_progress()):
return self.async_abort(reason='already_in_progress')

device_entries = configured_devices(self.hass)

if serialnumber in device_entries:
@@ -3,7 +3,7 @@
"name": "Axis",
"config_flow": true,
"documentation": "https://www.home-assistant.io/components/axis",
"requirements": ["axis==23"],
"requirements": ["axis==24"],
"dependencies": [],
"zeroconf": ["_axis-video._tcp.local."],
"codeowners": ["@kane610"]
@@ -14,6 +14,7 @@
},
"error": {
"already_configured": "Device is already configured",
"already_in_progress": "Config flow for device is already in progress.",
"device_unavailable": "Device is not available",
"faulty_credentials": "Bad user credentials"
},
@@ -94,7 +94,8 @@ class EsphomeFlowHandler(config_entries.ConfigFlow):
data = self.hass.data[DATA_KEY][
entry.entry_id]  # type: RuntimeEntryData
# Node names are unique in the network
already_configured = data.device_info.name == node_name
if data.device_info is not None:
already_configured = data.device_info.name == node_name

if already_configured:
return self.async_abort(
@@ -3,7 +3,7 @@
"name": "Fitbit",
"documentation": "https://www.home-assistant.io/components/fitbit",
"requirements": [
"fitbit==0.3.0"
"fitbit==0.3.1"
],
"dependencies": [
"configurator",
@@ -4,9 +4,10 @@ import logging
|
||||
import os
|
||||
import pathlib
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp import web, web_urldispatcher, hdrs
|
||||
import voluptuous as vol
|
||||
import jinja2
|
||||
from yarl import URL
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.http.view import HomeAssistantView
|
||||
@@ -50,7 +51,6 @@ for size in (192, 384, 512, 1024):
|
||||
'type': 'image/png'
|
||||
})
|
||||
|
||||
DATA_FINALIZE_PANEL = 'frontend_finalize_panel'
|
||||
DATA_PANELS = 'frontend_panels'
|
||||
DATA_JS_VERSION = 'frontend_js_version'
|
||||
DATA_EXTRA_HTML_URL = 'frontend_extra_html_url'
|
||||
@@ -97,28 +97,6 @@ SCHEMA_GET_TRANSLATIONS = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
def generate_negative_index_regex():
|
||||
"""Generate regex for index."""
|
||||
skip = [
|
||||
# files
|
||||
"service_worker.js",
|
||||
"robots.txt",
|
||||
"onboarding.html",
|
||||
"manifest.json",
|
||||
]
|
||||
for folder in (
|
||||
"static",
|
||||
"frontend_latest",
|
||||
"frontend_es5",
|
||||
"local",
|
||||
"auth",
|
||||
"api",
|
||||
):
|
||||
# Regex matching static, static/, static/index.html
|
||||
skip.append("{}(/|/.+|)".format(folder))
|
||||
return r"(?!(" + "|".join(skip) + r")).*"
|
||||
|
||||
|
||||
class Panel:
|
||||
"""Abstract class for panels."""
|
||||
|
||||
@@ -256,7 +234,7 @@ async def async_setup(hass, config):
|
||||
if os.path.isdir(local):
|
||||
hass.http.register_static_path("/local", local, not is_dev)
|
||||
|
||||
hass.http.register_view(IndexView(repo_path))
|
||||
hass.http.app.router.register_resource(IndexView(repo_path, hass))
|
||||
|
||||
for panel in ('kiosk', 'states', 'profile'):
|
||||
async_register_built_in_panel(hass, panel)
|
||||
@@ -327,21 +305,64 @@ def _async_setup_themes(hass, themes):
|
||||
hass.services.async_register(DOMAIN, SERVICE_RELOAD_THEMES, reload_themes)
|
||||
|
||||
|
||||
class IndexView(HomeAssistantView):
|
||||
class IndexView(web_urldispatcher.AbstractResource):
|
||||
"""Serve the frontend."""
|
||||
|
||||
url = '/'
|
||||
name = 'frontend:index'
|
||||
requires_auth = False
|
||||
extra_urls = [
|
||||
"/{extra:%s}" % generate_negative_index_regex()
|
||||
]
|
||||
|
||||
def __init__(self, repo_path):
|
||||
def __init__(self, repo_path, hass):
|
||||
"""Initialize the frontend view."""
|
||||
super().__init__(name="frontend:index")
|
||||
self.repo_path = repo_path
|
||||
self.hass = hass
|
||||
self._template_cache = None
|
||||
|
||||
@property
|
||||
def canonical(self) -> str:
|
||||
"""Return resource's canonical path."""
|
||||
return '/'
|
||||
|
||||
@property
|
||||
def _route(self):
|
||||
"""Return the index route."""
|
||||
return web_urldispatcher.ResourceRoute('GET', self.get, self)
|
||||
|
||||
def url_for(self, **kwargs: str) -> URL:
|
||||
"""Construct url for resource with additional params."""
|
||||
return URL("/")
|
||||
|
||||
async def resolve(self, request: web.Request):
|
||||
"""Resolve resource.
|
||||
|
||||
Return (UrlMappingMatchInfo, allowed_methods) pair.
|
||||
"""
|
||||
if (request.path != '/' and
|
||||
request.url.parts[1] not in self.hass.data[DATA_PANELS]):
|
||||
return None, set()
|
||||
|
||||
if request.method != hdrs.METH_GET:
|
||||
return None, {'GET'}
|
||||
|
||||
return web_urldispatcher.UrlMappingMatchInfo({}, self._route), {'GET'}
|
||||
|
||||
def add_prefix(self, prefix: str) -> None:
|
||||
"""Add a prefix to processed URLs.
|
||||
|
||||
Required for subapplications support.
|
||||
"""
|
||||
|
||||
def get_info(self):
|
||||
"""Return a dict with additional info useful for introspection."""
|
||||
return {
|
||||
'panels': list(self.hass.data[DATA_PANELS])
|
||||
}
|
||||
|
||||
def freeze(self) -> None:
|
||||
"""Freeze the resource."""
|
||||
pass
|
||||
|
||||
def raw_match(self, path: str) -> bool:
|
||||
"""Perform a raw match against path."""
|
||||
pass
|
||||
|
||||
def get_template(self):
|
||||
"""Get template."""
|
||||
tpl = self._template_cache
|
||||
@@ -357,14 +378,10 @@ class IndexView(HomeAssistantView):
|
||||
|
||||
return tpl
|
||||
|
||||
async def get(self, request, extra=None):
|
||||
"""Serve the index view."""
|
||||
async def get(self, request: web.Request):
|
||||
"""Serve the index page for panel pages."""
|
||||
hass = request.app['hass']
|
||||
|
||||
if (request.path != '/' and
|
||||
request.url.parts[1] not in hass.data[DATA_PANELS]):
|
||||
raise web.HTTPNotFound
|
||||
|
||||
if not hass.components.onboarding.async_is_onboarded():
|
||||
return web.Response(status=302, headers={
|
||||
'location': '/onboarding.html'
|
||||
@@ -383,6 +400,14 @@ class IndexView(HomeAssistantView):
|
||||
content_type='text/html'
|
||||
)
|
||||
|
||||
def __len__(self) -> int:
|
||||
"""Return length of resource."""
|
||||
return 1
|
||||
|
||||
def __iter__(self):
|
||||
"""Iterate over routes."""
|
||||
return iter([self._route])
|
||||
|
||||
|
||||
class ManifestJSONView(HomeAssistantView):
|
||||
"""View to return a manifest.json."""
|
||||
|
||||
@@ -3,7 +3,7 @@
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/components/frontend",
"requirements": [
"home-assistant-frontend==20190530.0"
"home-assistant-frontend==20190601.0"
],
"dependencies": [
"api",
@@ -43,6 +43,7 @@ class GeofencyEntity(DeviceTrackerEntity):
self._location_name = location_name
self._gps = gps
self._unsub_dispatcher = None
self._unique_id = device

@property
def device_state_attributes(self):

@@ -74,6 +75,19 @@ class GeofencyEntity(DeviceTrackerEntity):
"""No polling needed."""
return False

@property
def unique_id(self):
"""Return the unique ID."""
return self._unique_id

@property
def device_info(self):
"""Return the device info."""
return {
'name': self._name,
'identifiers': {(GF_DOMAIN, self._unique_id)},
}

@property
def source_type(self):
"""Return the source type, eg gps or router, of the device."""
@@ -45,6 +45,7 @@ class GPSLoggerEntity(DeviceTrackerEntity):
self._battery = battery
self._location = location
self._unsub_dispatcher = None
self._unique_id = device

@property
def battery_level(self):

@@ -81,6 +82,19 @@ class GPSLoggerEntity(DeviceTrackerEntity):
"""No polling needed."""
return False

@property
def unique_id(self):
"""Return the unique ID."""
return self._unique_id

@property
def device_info(self):
"""Return the device info."""
return {
'name': self._name,
'identifiers': {(GPL_DOMAIN, self._unique_id)},
}

@property
def source_type(self):
"""Return the source type, eg gps or router, of the device."""
@@ -119,8 +119,12 @@ class HassIOIngress(HomeAssistantView):
source_header = _init_header(request, token)

async with self._websession.request(
request.method, url, headers=source_header,
params=request.query, data=data
request.method,
url,
headers=source_header,
params=request.query,
allow_redirects=False,
data=data
) as result:
headers = _response_header(result)
@@ -126,14 +126,16 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow):
# It changes if a device is factory reset.
hkid = properties['id']
model = properties['md']

name = discovery_info['name'].replace('._hap._tcp.local.', '')
status_flags = int(properties['sf'])
paired = not status_flags & 0x01

_LOGGER.debug("Discovered device %s (%s - %s)", name, model, hkid)

# pylint: disable=unsupported-assignment-operation
self.context['hkid'] = hkid
self.context['title_placeholders'] = {
'name': discovery_info['name'].replace('._hap._tcp.local.', ''),
'name': name,
}

# If multiple HomekitControllerFlowHandler end up getting created
@@ -7,6 +7,11 @@
"aiolifx==0.6.7",
"aiolifx_effects==0.2.2"
],
"homekit": {
"models": [
"LIFX"
]
},
"dependencies": [],
"codeowners": [
"@amelchio"
@@ -7,13 +7,15 @@ from homeassistant.helpers.typing import ConfigType, HomeAssistantType
|
||||
from .const import (ATTR_DEVICE_ID, ATTR_DEVICE_NAME,
|
||||
ATTR_MANUFACTURER, ATTR_MODEL, ATTR_OS_VERSION,
|
||||
DATA_BINARY_SENSOR, DATA_CONFIG_ENTRIES, DATA_DELETED_IDS,
|
||||
DATA_DEVICES, DATA_SENSOR, DATA_STORE, DOMAIN, STORAGE_KEY,
|
||||
STORAGE_VERSION)
|
||||
DATA_DEVICES, DATA_DEVICE_TRACKER, DATA_SENSOR, DATA_STORE,
|
||||
DOMAIN, STORAGE_KEY, STORAGE_VERSION)
|
||||
|
||||
from .http_api import RegistrationsView
|
||||
from .webhook import handle_webhook
|
||||
from .websocket_api import register_websocket_handlers
|
||||
|
||||
PLATFORMS = 'sensor', 'binary_sensor', 'device_tracker'
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistantType, config: ConfigType):
|
||||
"""Set up the mobile app component."""
|
||||
@@ -24,7 +26,6 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType):
|
||||
DATA_BINARY_SENSOR: {},
|
||||
DATA_CONFIG_ENTRIES: {},
|
||||
DATA_DELETED_IDS: [],
|
||||
DATA_DEVICES: {},
|
||||
DATA_SENSOR: {}
|
||||
}
|
||||
|
||||
@@ -33,6 +34,7 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType):
|
||||
DATA_CONFIG_ENTRIES: {},
|
||||
DATA_DELETED_IDS: app_config.get(DATA_DELETED_IDS, []),
|
||||
DATA_DEVICES: {},
|
||||
DATA_DEVICE_TRACKER: {},
|
||||
DATA_SENSOR: app_config.get(DATA_SENSOR, {}),
|
||||
DATA_STORE: store,
|
||||
}
|
||||
@@ -83,10 +85,8 @@ async def async_setup_entry(hass, entry):
|
||||
webhook_register(hass, DOMAIN, registration_name, webhook_id,
|
||||
handle_webhook)
|
||||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.async_forward_entry_setup(entry,
|
||||
DATA_BINARY_SENSOR))
|
||||
hass.async_create_task(
|
||||
hass.config_entries.async_forward_entry_setup(entry, DATA_SENSOR))
|
||||
for domain in PLATFORMS:
|
||||
hass.async_create_task(
|
||||
hass.config_entries.async_forward_entry_setup(entry, domain))
|
||||
|
||||
return True
|
||||
|
||||
@@ -25,6 +25,7 @@ DATA_BINARY_SENSOR = 'binary_sensor'
|
||||
DATA_CONFIG_ENTRIES = 'config_entries'
|
||||
DATA_DELETED_IDS = 'deleted_ids'
|
||||
DATA_DEVICES = 'devices'
|
||||
DATA_DEVICE_TRACKER = 'device_tracker'
|
||||
DATA_SENSOR = 'sensor'
|
||||
DATA_STORE = 'store'
|
||||
|
||||
@@ -160,6 +161,7 @@ SENSOR_TYPES = [ATTR_SENSOR_TYPE_BINARY_SENSOR, ATTR_SENSOR_TYPE_SENSOR]
|
||||
COMBINED_CLASSES = sorted(set(BINARY_SENSOR_CLASSES + SENSOR_CLASSES))
|
||||
|
||||
SIGNAL_SENSOR_UPDATE = DOMAIN + '_sensor_update'
|
||||
SIGNAL_LOCATION_UPDATE = DOMAIN + '_location_update_{}'
|
||||
|
||||
REGISTER_SENSOR_SCHEMA = vol.Schema({
|
||||
vol.Optional(ATTR_SENSOR_ATTRIBUTES, default={}): dict,
|
||||
|
||||
homeassistant/components/mobile_app/device_tracker.py (new file, 137 lines)
@@ -0,0 +1,137 @@
"""Device tracker platform that adds support for OwnTracks over MQTT."""
|
||||
import logging
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.components.device_tracker.const import (
|
||||
DOMAIN, SOURCE_TYPE_GPS)
|
||||
from homeassistant.components.device_tracker.config_entry import (
|
||||
DeviceTrackerEntity
|
||||
)
|
||||
from .const import (
|
||||
DOMAIN as MA_DOMAIN,
|
||||
|
||||
ATTR_ALTITUDE,
|
||||
ATTR_BATTERY,
|
||||
ATTR_COURSE,
|
||||
ATTR_DEVICE_ID,
|
||||
ATTR_DEVICE_NAME,
|
||||
ATTR_GPS_ACCURACY,
|
||||
ATTR_GPS,
|
||||
ATTR_LOCATION_NAME,
|
||||
ATTR_SPEED,
|
||||
ATTR_VERTICAL_ACCURACY,
|
||||
|
||||
SIGNAL_LOCATION_UPDATE,
|
||||
)
|
||||
from .helpers import device_info
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, entry, async_add_entities):
|
||||
"""Set up OwnTracks based off an entry."""
|
||||
@callback
|
||||
def _receive_data(data):
|
||||
"""Receive set location."""
|
||||
dev_id = entry.data[ATTR_DEVICE_ID]
|
||||
device = hass.data[MA_DOMAIN][DOMAIN].get(dev_id)
|
||||
|
||||
if device is not None:
|
||||
device.update_data(data)
|
||||
return
|
||||
|
||||
device = hass.data[MA_DOMAIN][DOMAIN][dev_id] = MobileAppEntity(
|
||||
entry, data
|
||||
)
|
||||
async_add_entities([device])
|
||||
|
||||
hass.helpers.dispatcher.async_dispatcher_connect(
|
||||
SIGNAL_LOCATION_UPDATE.format(entry.entry_id), _receive_data)
|
||||
return True
|
||||
|
||||
|
||||
class MobileAppEntity(DeviceTrackerEntity):
|
||||
"""Represent a tracked device."""
|
||||
|
||||
def __init__(self, entry, data):
|
||||
"""Set up OwnTracks entity."""
|
||||
self._entry = entry
|
||||
self._data = data
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return the unique ID."""
|
||||
return self._entry.data[ATTR_DEVICE_ID]
|
||||
|
||||
@property
|
||||
def battery_level(self):
|
||||
"""Return the battery level of the device."""
|
||||
return self._data.get(ATTR_BATTERY)
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return device specific attributes."""
|
||||
attrs = {}
|
||||
for key in (ATTR_ALTITUDE, ATTR_COURSE,
|
||||
ATTR_SPEED, ATTR_VERTICAL_ACCURACY):
|
||||
value = self._data.get(key)
|
||||
if value is not None:
|
||||
attrs[key] = value
|
||||
|
||||
return attrs
|
||||
|
||||
@property
|
||||
def location_accuracy(self):
|
||||
"""Return the gps accuracy of the device."""
|
||||
return self._data.get(ATTR_GPS_ACCURACY)
|
||||
|
||||
@property
|
||||
def latitude(self):
|
||||
"""Return latitude value of the device."""
|
||||
gps = self._data.get(ATTR_GPS)
|
||||
|
||||
if gps is None:
|
||||
return None
|
||||
|
||||
return gps[0]
|
||||
|
||||
@property
|
||||
def longitude(self):
|
||||
"""Return longitude value of the device."""
|
||||
gps = self._data.get(ATTR_GPS)
|
||||
|
||||
if gps is None:
|
||||
return None
|
||||
|
||||
return gps[1]
|
||||
|
||||
@property
|
||||
def location_name(self):
|
||||
"""Return a location name for the current location of the device."""
|
||||
return self._data.get(ATTR_LOCATION_NAME)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the device."""
|
||||
return self._entry.data[ATTR_DEVICE_NAME]
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""No polling needed."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def source_type(self):
|
||||
"""Return the source type, eg gps or router, of the device."""
|
||||
return SOURCE_TYPE_GPS
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
"""Return the device info."""
|
||||
return device_info(self._entry.data)
|
||||
|
||||
@callback
|
||||
def update_data(self, data):
|
||||
"""Mark the device as seen."""
|
||||
self._data = data
|
||||
self.async_write_ha_state()
|
||||
@@ -6,11 +6,11 @@ from homeassistant.helpers.device_registry import DeviceEntry
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import (ATTR_DEVICE_ID, ATTR_DEVICE_NAME, ATTR_MANUFACTURER,
|
||||
ATTR_MODEL, ATTR_OS_VERSION, ATTR_SENSOR_ATTRIBUTES,
|
||||
from .const import (ATTR_SENSOR_ATTRIBUTES,
|
||||
ATTR_SENSOR_DEVICE_CLASS, ATTR_SENSOR_ICON,
|
||||
ATTR_SENSOR_NAME, ATTR_SENSOR_TYPE, ATTR_SENSOR_UNIQUE_ID,
|
||||
DOMAIN, SIGNAL_SENSOR_UPDATE)
|
||||
from .helpers import device_info
|
||||
|
||||
|
||||
def sensor_id(webhook_id, unique_id):
|
||||
@@ -76,17 +76,7 @@ class MobileAppEntity(Entity):
|
||||
@property
|
||||
def device_info(self):
|
||||
"""Return device registry information for this entity."""
|
||||
return {
|
||||
'identifiers': {
|
||||
(ATTR_DEVICE_ID, self._registration[ATTR_DEVICE_ID]),
|
||||
(CONF_WEBHOOK_ID, self._registration[CONF_WEBHOOK_ID])
|
||||
},
|
||||
'manufacturer': self._registration[ATTR_MANUFACTURER],
|
||||
'model': self._registration[ATTR_MODEL],
|
||||
'device_name': self._registration[ATTR_DEVICE_NAME],
|
||||
'sw_version': self._registration[ATTR_OS_VERSION],
|
||||
'config_entries': self._device.config_entries
|
||||
}
|
||||
return device_info(self._registration)
|
||||
|
||||
async def async_update(self):
|
||||
"""Get the latest state of the sensor."""
|
||||
|
||||
@@ -9,7 +9,7 @@ from homeassistant.core import Context
|
||||
from homeassistant.helpers.json import JSONEncoder
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
|
||||
from .const import (ATTR_APP_DATA, ATTR_APP_ID, ATTR_APP_NAME,
|
||||
from .const import (ATTR_APP_DATA, ATTR_APP_ID, ATTR_APP_NAME, ATTR_DEVICE_ID,
|
||||
ATTR_APP_VERSION, ATTR_DEVICE_NAME, ATTR_MANUFACTURER,
|
||||
ATTR_MODEL, ATTR_OS_VERSION, ATTR_SUPPORTS_ENCRYPTION,
|
||||
CONF_SECRET, CONF_USER_ID, DATA_BINARY_SENSOR,
|
||||
@@ -148,3 +148,16 @@ def webhook_response(data, *, registration: Dict, status: int = 200,
|
||||
|
||||
return Response(text=data, status=status, content_type='application/json',
|
||||
headers=headers)
|
||||
|
||||
|
||||
def device_info(registration: Dict) -> Dict:
|
||||
"""Return the device info for this registration."""
|
||||
return {
|
||||
'identifiers': {
|
||||
(DOMAIN, registration[ATTR_DEVICE_ID]),
|
||||
},
|
||||
'manufacturer': registration[ATTR_MANUFACTURER],
|
||||
'model': registration[ATTR_MODEL],
|
||||
'device_name': registration[ATTR_DEVICE_NAME],
|
||||
'sw_version': registration[ATTR_OS_VERSION],
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
"PyNaCl==1.3.0"
|
||||
],
|
||||
"dependencies": [
|
||||
"device_tracker",
|
||||
"http",
|
||||
"webhook"
|
||||
],
|
||||
|
||||
@@ -6,10 +6,6 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components.cloud import (async_remote_ui_url,
|
||||
CloudNotAvailable)
|
||||
from homeassistant.components.device_tracker import (ATTR_ATTRIBUTES,
|
||||
ATTR_DEV_ID,
|
||||
DOMAIN as DT_DOMAIN,
|
||||
SERVICE_SEE as DT_SEE)
|
||||
from homeassistant.components.frontend import MANIFEST_JSON
|
||||
from homeassistant.components.zone.const import DOMAIN as ZONE_DOMAIN
|
||||
|
||||
@@ -24,15 +20,12 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.template import attach
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .const import (ATTR_ALTITUDE, ATTR_BATTERY, ATTR_COURSE, ATTR_DEVICE_ID,
|
||||
from .const import (ATTR_DEVICE_ID,
|
||||
ATTR_DEVICE_NAME, ATTR_EVENT_DATA, ATTR_EVENT_TYPE,
|
||||
ATTR_GPS, ATTR_GPS_ACCURACY, ATTR_LOCATION_NAME,
|
||||
ATTR_MANUFACTURER, ATTR_MODEL, ATTR_OS_VERSION,
|
||||
ATTR_SENSOR_TYPE, ATTR_SENSOR_UNIQUE_ID, ATTR_SPEED,
|
||||
ATTR_SENSOR_TYPE, ATTR_SENSOR_UNIQUE_ID,
|
||||
ATTR_SUPPORTS_ENCRYPTION, ATTR_TEMPLATE,
|
||||
ATTR_TEMPLATE_VARIABLES, ATTR_VERTICAL_ACCURACY,
|
||||
ATTR_TEMPLATE_VARIABLES,
|
||||
ATTR_WEBHOOK_DATA, ATTR_WEBHOOK_ENCRYPTED,
|
||||
ATTR_WEBHOOK_ENCRYPTED_DATA, ATTR_WEBHOOK_TYPE,
|
||||
CONF_CLOUDHOOK_URL, CONF_REMOTE_UI_URL, CONF_SECRET,
|
||||
@@ -45,7 +38,7 @@ from .const import (ATTR_ALTITUDE, ATTR_BATTERY, ATTR_COURSE, ATTR_DEVICE_ID,
|
||||
WEBHOOK_TYPE_REGISTER_SENSOR, WEBHOOK_TYPE_RENDER_TEMPLATE,
|
||||
WEBHOOK_TYPE_UPDATE_LOCATION,
|
||||
WEBHOOK_TYPE_UPDATE_REGISTRATION,
|
||||
WEBHOOK_TYPE_UPDATE_SENSOR_STATES)
|
||||
WEBHOOK_TYPE_UPDATE_SENSOR_STATES, SIGNAL_LOCATION_UPDATE)
|
||||
|
||||
|
||||
from .helpers import (_decrypt_payload, empty_okay_response, error_response,
|
||||
@@ -151,37 +144,9 @@ async def handle_webhook(hass: HomeAssistantType, webhook_id: str,
|
||||
headers=headers)
|
||||
|
||||
if webhook_type == WEBHOOK_TYPE_UPDATE_LOCATION:
|
||||
see_payload = {
|
||||
ATTR_DEV_ID: slugify(registration[ATTR_DEVICE_NAME]),
|
||||
ATTR_GPS: data[ATTR_GPS],
|
||||
ATTR_GPS_ACCURACY: data[ATTR_GPS_ACCURACY],
|
||||
}
|
||||
|
||||
for key in (ATTR_LOCATION_NAME, ATTR_BATTERY):
|
||||
value = data.get(key)
|
||||
if value is not None:
|
||||
see_payload[key] = value
|
||||
|
||||
attrs = {}
|
||||
|
||||
for key in (ATTR_ALTITUDE, ATTR_COURSE,
|
||||
ATTR_SPEED, ATTR_VERTICAL_ACCURACY):
|
||||
value = data.get(key)
|
||||
if value is not None:
|
||||
attrs[key] = value
|
||||
|
||||
if attrs:
|
||||
see_payload[ATTR_ATTRIBUTES] = attrs
|
||||
|
||||
try:
|
||||
await hass.services.async_call(DT_DOMAIN,
|
||||
DT_SEE, see_payload,
|
||||
blocking=True, context=context)
|
||||
# noqa: E722 pylint: disable=broad-except
|
||||
except (vol.Invalid, ServiceNotFound, Exception) as ex:
|
||||
_LOGGER.error("Error when updating location during mobile_app "
|
||||
"webhook (device name: %s): %s",
|
||||
registration[ATTR_DEVICE_NAME], ex)
|
||||
hass.helpers.dispatcher.async_dispatcher_send(
|
||||
SIGNAL_LOCATION_UPDATE.format(config_entry.entry_id), data
|
||||
)
|
||||
return empty_okay_response(headers=headers)
|
||||
|
||||
if webhook_type == WEBHOOK_TYPE_UPDATE_REGISTRATION:
|
||||
|
||||
@@ -135,15 +135,15 @@ class Scanner:
if not xml:
resp = await session.get(xml_location, timeout=5)
xml = await resp.text()
except aiohttp.ClientError as err:
except (aiohttp.ClientError, asyncio.TimeoutError) as err:
_LOGGER.debug("Error fetching %s: %s", xml_location, err)
return None
return {}

try:
tree = ElementTree.fromstring(xml)
except ElementTree.ParseError as err:
_LOGGER.debug("Error parsing %s: %s", xml_location, err)
return None
return {}

return util.etree_to_dict(tree).get('root', {}).get('device', {})
@@ -41,6 +41,12 @@ def get_scanner(hass, config):
should be gradually migrated in the pypi package

"""
_LOGGER.warning("TP-Link device tracker is unmaintained and will be "
"removed in the future releases if no maintainer is "
"found. If you have interest in this integration, "
"feel free to create a pull request to move this code "
"to a new 'tplink_router' integration and refactoring "
"the device-specific parts to the tplink library")
for cls in [
TplinkDeviceScanner, Tplink5DeviceScanner, Tplink4DeviceScanner,
Tplink3DeviceScanner, Tplink2DeviceScanner, Tplink1DeviceScanner
@@ -10,7 +10,7 @@ import voluptuous as vol
|
||||
from zeroconf import ServiceBrowser, ServiceInfo, ServiceStateChange, Zeroconf
|
||||
|
||||
from homeassistant.const import (EVENT_HOMEASSISTANT_STOP, __version__)
|
||||
from homeassistant.generated.zeroconf import ZEROCONF
|
||||
from homeassistant.generated.zeroconf import ZEROCONF, HOMEKIT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -24,6 +24,7 @@ ATTR_NAME = 'name'
|
||||
ATTR_PROPERTIES = 'properties'
|
||||
|
||||
ZEROCONF_TYPE = '_home-assistant._tcp.local.'
|
||||
HOMEKIT_TYPE = '_hap._tcp.local.'
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({}),
|
||||
@@ -50,21 +51,30 @@ def setup(hass, config):
|
||||
|
||||
def service_update(zeroconf, service_type, name, state_change):
|
||||
"""Service state changed."""
|
||||
if state_change is ServiceStateChange.Added:
|
||||
service_info = zeroconf.get_service_info(service_type, name)
|
||||
info = info_from_service(service_info)
|
||||
_LOGGER.debug("Discovered new device %s %s", name, info)
|
||||
if state_change != ServiceStateChange.Added:
|
||||
return
|
||||
|
||||
for domain in ZEROCONF[service_type]:
|
||||
hass.add_job(
|
||||
hass.config_entries.flow.async_init(
|
||||
domain, context={'source': DOMAIN}, data=info
|
||||
)
|
||||
service_info = zeroconf.get_service_info(service_type, name)
|
||||
info = info_from_service(service_info)
|
||||
_LOGGER.debug("Discovered new device %s %s", name, info)
|
||||
|
||||
# If we can handle it as a HomeKit discovery, we do that here.
|
||||
if service_type == HOMEKIT_TYPE and handle_homekit(hass, info):
|
||||
return
|
||||
|
||||
for domain in ZEROCONF[service_type]:
|
||||
hass.add_job(
|
||||
hass.config_entries.flow.async_init(
|
||||
domain, context={'source': DOMAIN}, data=info
|
||||
)
|
||||
)
|
||||
|
||||
for service in ZEROCONF:
|
||||
ServiceBrowser(zeroconf, service, handlers=[service_update])
|
||||
|
||||
if HOMEKIT_TYPE not in ZEROCONF:
|
||||
ServiceBrowser(zeroconf, HOMEKIT_TYPE, handlers=[service_update])
|
||||
|
||||
def stop_zeroconf(_):
|
||||
"""Stop Zeroconf."""
|
||||
zeroconf.unregister_service(info)
|
||||
@@ -75,6 +85,36 @@ def setup(hass, config):
|
||||
return True
|
||||
|
||||
|
||||
def handle_homekit(hass, info) -> bool:
|
||||
"""Handle a HomeKit discovery.
|
||||
|
||||
Return if discovery was forwarded.
|
||||
"""
|
||||
model = None
|
||||
props = info.get('properties', {})
|
||||
|
||||
for key in props:
|
||||
if key.lower() == 'md':
|
||||
model = props[key]
|
||||
break
|
||||
|
||||
if model is None:
|
||||
return False
|
||||
|
||||
for test_model in HOMEKIT:
|
||||
if not model.startswith(test_model):
|
||||
continue
|
||||
|
||||
hass.add_job(
|
||||
hass.config_entries.flow.async_init(
|
||||
HOMEKIT[test_model], context={'source': 'homekit'}, data=info
|
||||
)
|
||||
)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def info_from_service(service):
|
||||
"""Return prepared info from mDNS entries."""
|
||||
properties = {}
|
||||
|
||||
@@ -2,7 +2,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 94
PATCH_VERSION = '0b1'
PATCH_VERSION = '0b4'
__short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION)
__version__ = '{}.{}'.format(__short_version__, PATCH_VERSION)
REQUIRED_PYTHON_VER = (3, 5, 3)
@@ -18,3 +18,7 @@ ZEROCONF = {
"homekit_controller"
]
}

HOMEKIT = {
"LIFX ": "lifx"
}
@@ -81,6 +81,10 @@ class DiscoveryFlowHandler(config_entries.ConfigFlow):

return await self.async_step_confirm()

async_step_zeroconf = async_step_discovery
async_step_ssdp = async_step_discovery
async_step_homekit = async_step_discovery

async def async_step_import(self, _):
"""Handle a flow initialized by import."""
if self._async_in_progress() or self._async_current_entries():
@@ -45,7 +45,7 @@ class EntityPlatform:
self._async_unsub_polling = None
# Method to cancel the retry of setup
self._async_cancel_retry_setup = None
self._process_updates = asyncio.Lock()
self._process_updates = None

# Platform is None for the EntityComponent "catch-all" EntityPlatform
# which powers entity_component.add_entities

@@ -404,6 +404,8 @@ class EntityPlatform:

This method must be run in the event loop.
"""
if self._process_updates is None:
self._process_updates = asyncio.Lock()
if self._process_updates.locked():
self.logger.warning(
"Updating %s %s took longer than the scheduled update "
@@ -44,12 +44,15 @@ async def async_process_requirements(hass: HomeAssistant, name: str,

def pip_kwargs(config_dir: Optional[str]) -> Dict[str, Any]:
"""Return keyword arguments for PIP install."""
is_docker = pkg_util.is_docker_env()
kwargs = {
'constraints': os.path.join(os.path.dirname(__file__), CONSTRAINT_FILE)
'constraints': os.path.join(os.path.dirname(__file__),
CONSTRAINT_FILE),
'no_cache_dir': is_docker,
}
if 'WHEELS_LINKS' in os.environ:
kwargs['find_links'] = os.environ['WHEELS_LINKS']
if not (config_dir is None or pkg_util.is_virtual_env()) and \
not pkg_util.is_docker_env():
not is_docker:
kwargs['target'] = os.path.join(config_dir, 'deps')
return kwargs
@@ -49,7 +49,8 @@ def is_installed(package: str) -> bool:
def install_package(package: str, upgrade: bool = True,
target: Optional[str] = None,
constraints: Optional[str] = None,
find_links: Optional[str] = None) -> bool:
find_links: Optional[str] = None,
no_cache_dir: Optional[bool] = False) -> bool:
"""Install a package on PyPi. Accepts pip compatible package strings.

Return boolean if install successful.

@@ -58,6 +59,8 @@ def install_package(package: str, upgrade: bool = True,
_LOGGER.info('Attempting install of %s', package)
env = os.environ.copy()
args = [sys.executable, '-m', 'pip', 'install', '--quiet', package]
if no_cache_dir:
args.append('--no-cache-dir')
if upgrade:
args.append('--upgrade')
if constraints is not None:
@@ -212,7 +212,7 @@ av==6.1.2
# avion==0.10

# homeassistant.components.axis
axis==23
axis==24

# homeassistant.components.azure_event_hub
azure-eventhub==1.3.1

@@ -453,7 +453,7 @@ fiblary3==0.1.7
fints==1.0.1

# homeassistant.components.fitbit
fitbit==0.3.0
fitbit==0.3.1

# homeassistant.components.fixer
fixerio==1.0.0a0

@@ -577,7 +577,7 @@ hole==0.3.0
holidays==0.9.10

# homeassistant.components.frontend
home-assistant-frontend==20190530.0
home-assistant-frontend==20190601.0

# homeassistant.components.zwave
homeassistant-pyozw==0.1.4

@@ -70,7 +70,7 @@ apns2==0.3.0
av==6.1.2

# homeassistant.components.axis
axis==23
axis==24

# homeassistant.components.zha
bellows-homeassistant==0.7.3

@@ -148,7 +148,7 @@ hdate==0.8.7
holidays==0.9.10

# homeassistant.components.frontend
home-assistant-frontend==20190530.0
home-assistant-frontend==20190601.0

# homeassistant.components.homekit_controller
homekit[IP]==0.14.0
@@ -17,6 +17,9 @@ MANIFEST_SCHEMA = vol.Schema({
vol.Optional('manufacturer'): [str],
vol.Optional('device_type'): [str],
}),
vol.Optional('homekit'): vol.Schema({
vol.Optional('models'): [str],
}),
vol.Required('documentation'): str,
vol.Required('requirements'): [str],
vol.Required('dependencies'): [str],
@@ -1,5 +1,5 @@
|
||||
"""Generate zeroconf file."""
|
||||
from collections import OrderedDict
|
||||
from collections import OrderedDict, defaultdict
|
||||
import json
|
||||
from typing import Dict
|
||||
|
||||
@@ -13,12 +13,15 @@ To update, run python3 -m hassfest
|
||||
|
||||
|
||||
ZEROCONF = {}
|
||||
|
||||
HOMEKIT = {}
|
||||
""".strip()
|
||||
|
||||
|
||||
def generate_and_validate(integrations: Dict[str, Integration]):
|
||||
"""Validate and generate zeroconf data."""
|
||||
service_type_dict = {}
|
||||
service_type_dict = defaultdict(list)
|
||||
homekit_dict = {}
|
||||
|
||||
for domain in sorted(integrations):
|
||||
integration = integrations[domain]
|
||||
@@ -26,17 +29,30 @@ def generate_and_validate(integrations: Dict[str, Integration]):
|
||||
if not integration.manifest:
|
||||
continue
|
||||
|
||||
service_types = integration.manifest.get('zeroconf')
|
||||
service_types = integration.manifest.get('zeroconf', [])
|
||||
homekit = integration.manifest.get('homekit', {})
|
||||
homekit_models = homekit.get('models', [])
|
||||
|
||||
if not service_types:
|
||||
if not service_types and not homekit_models:
|
||||
continue
|
||||
|
||||
try:
|
||||
with open(str(integration.path / "config_flow.py")) as fp:
|
||||
if ' async_step_zeroconf(' not in fp.read():
|
||||
content = fp.read()
|
||||
uses_discovery_flow = 'register_discovery_flow' in content
|
||||
|
||||
if (service_types and not uses_discovery_flow and
|
||||
' async_step_zeroconf(' not in content):
|
||||
integration.add_error(
|
||||
'zeroconf', 'Config flow has no async_step_zeroconf')
|
||||
continue
|
||||
|
||||
if (homekit_models and not uses_discovery_flow and
|
||||
' async_step_homekit(' not in content):
|
||||
integration.add_error(
|
||||
'zeroconf', 'Config flow has no async_step_homekit')
|
||||
continue
|
||||
|
||||
except FileNotFoundError:
|
||||
integration.add_error(
|
||||
'zeroconf',
|
||||
@@ -45,16 +61,50 @@ def generate_and_validate(integrations: Dict[str, Integration]):
|
||||
continue
|
||||
|
||||
for service_type in service_types:
|
||||
|
||||
if service_type not in service_type_dict:
|
||||
service_type_dict[service_type] = []
|
||||
|
||||
service_type_dict[service_type].append(domain)
|
||||
|
||||
data = OrderedDict((key, service_type_dict[key])
|
||||
for key in sorted(service_type_dict))
|
||||
for model in homekit_models:
|
||||
# We add a space, as we want to test for it to be model + space.
|
||||
model += " "
|
||||
|
||||
return BASE.format(json.dumps(data, indent=4))
|
||||
if model in homekit_dict:
|
||||
integration.add_error(
|
||||
'zeroconf',
|
||||
'Integrations {} and {} have overlapping HomeKit '
|
||||
'models'.format(domain, homekit_dict[model]))
|
||||
break
|
||||
|
||||
homekit_dict[model] = domain
|
||||
|
||||
# HomeKit models are matched on starting string, make sure none overlap.
|
||||
warned = set()
|
||||
for key in homekit_dict:
|
||||
if key in warned:
|
||||
continue
|
||||
|
||||
# n^2 yoooo
|
||||
for key_2 in homekit_dict:
|
||||
if key == key_2 or key_2 in warned:
|
||||
continue
|
||||
|
||||
if key.startswith(key_2) or key_2.startswith(key):
|
||||
integration.add_error(
|
||||
'zeroconf',
|
||||
'Integrations {} and {} have overlapping HomeKit '
|
||||
'models'.format(homekit_dict[key], homekit_dict[key_2]))
|
||||
warned.add(key)
|
||||
warned.add(key_2)
|
||||
break
|
||||
|
||||
zeroconf = OrderedDict((key, service_type_dict[key])
|
||||
for key in sorted(service_type_dict))
|
||||
homekit = OrderedDict((key, homekit_dict[key])
|
||||
for key in sorted(homekit_dict))
|
||||
|
||||
return BASE.format(
|
||||
json.dumps(zeroconf, indent=4),
|
||||
json.dumps(homekit, indent=4),
|
||||
)
|
||||
|
||||
|
||||
def validate(integrations: Dict[str, Integration], config: Config):
|
||||
|
||||
@@ -8,8 +8,7 @@ import pytest
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.components.frontend import (
|
||||
DOMAIN, CONF_JS_VERSION, CONF_THEMES, CONF_EXTRA_HTML_URL,
|
||||
CONF_EXTRA_HTML_URL_ES5, generate_negative_index_regex,
|
||||
EVENT_PANELS_UPDATED)
|
||||
CONF_EXTRA_HTML_URL_ES5, EVENT_PANELS_UPDATED)
|
||||
from homeassistant.components.websocket_api.const import TYPE_RESULT
|
||||
|
||||
from tests.common import mock_coro, async_capture_events
|
||||
@@ -348,43 +347,3 @@ async def test_auth_authorize(mock_http_client):
|
||||
resp = await mock_http_client.get(authorizejs.groups(0)[0])
|
||||
assert resp.status == 200
|
||||
assert 'public' in resp.headers.get('cache-control')
|
||||
|
||||
|
||||
def test_index_regex():
|
||||
"""Test the index regex."""
|
||||
pattern = re.compile('/' + generate_negative_index_regex())
|
||||
|
||||
for should_match in (
|
||||
'/',
|
||||
'/lovelace',
|
||||
'/lovelace/default_view',
|
||||
'/map',
|
||||
'/config',
|
||||
):
|
||||
assert pattern.match(should_match), should_match
|
||||
|
||||
for should_not_match in (
|
||||
'/service_worker.js',
|
||||
'/manifest.json',
|
||||
'/onboarding.html',
|
||||
'/manifest.json',
|
||||
'static',
|
||||
'static/',
|
||||
'static/index.html',
|
||||
'frontend_latest',
|
||||
'frontend_latest/',
|
||||
'frontend_latest/index.html',
|
||||
'frontend_es5',
|
||||
'frontend_es5/',
|
||||
'frontend_es5/index.html',
|
||||
'local',
|
||||
'local/',
|
||||
'local/index.html',
|
||||
'auth',
|
||||
'auth/',
|
||||
'auth/index.html',
|
||||
'/api',
|
||||
'/api/',
|
||||
'/api/logbook',
|
||||
):
|
||||
assert not pattern.match(should_not_match), should_not_match
|
||||
|
||||
@@ -217,6 +217,12 @@ async def test_gps_enter_and_exit_home(hass, geofency_client, webhook_id):
|
||||
'device_tracker', device_name)).attributes['longitude']
|
||||
assert NOT_HOME_LONGITUDE == current_longitude
|
||||
|
||||
dev_reg = await hass.helpers.device_registry.async_get_registry()
|
||||
assert len(dev_reg.devices) == 1
|
||||
|
||||
ent_reg = await hass.helpers.entity_registry.async_get_registry()
|
||||
assert len(ent_reg.entities) == 1
|
||||
|
||||
|
||||
async def test_beacon_enter_and_exit_home(hass, geofency_client, webhook_id):
|
||||
"""Test iBeacon based zone enter and exit - a.k.a stationary iBeacon."""
|
||||
|
||||
@@ -140,6 +140,12 @@ async def test_enter_and_exit(hass, gpslogger_client, webhook_id):
|
||||
data['device'])).state
|
||||
assert STATE_NOT_HOME == state_name
|
||||
|
||||
dev_reg = await hass.helpers.device_registry.async_get_registry()
|
||||
assert len(dev_reg.devices) == 1
|
||||
|
||||
ent_reg = await hass.helpers.entity_registry.async_get_registry()
|
||||
assert len(ent_reg.entities) == 1
|
||||
|
||||
|
||||
async def test_enter_with_attrs(hass, gpslogger_client, webhook_id):
|
||||
"""Test when additional attributes are present."""
|
||||
@@ -172,6 +178,33 @@ async def test_enter_with_attrs(hass, gpslogger_client, webhook_id):
|
||||
assert state.attributes['provider'] == 'gps'
|
||||
assert state.attributes['activity'] == 'running'
|
||||
|
||||
data = {
|
||||
'latitude': HOME_LATITUDE,
|
||||
'longitude': HOME_LONGITUDE,
|
||||
'device': '123',
|
||||
'accuracy': 123,
|
||||
'battery': 23,
|
||||
'speed': 23,
|
||||
'direction': 123,
|
||||
'altitude': 123,
|
||||
'provider': 'gps',
|
||||
'activity': 'idle'
|
||||
}
|
||||
|
||||
req = await gpslogger_client.post(url, data=data)
|
||||
await hass.async_block_till_done()
|
||||
assert req.status == HTTP_OK
|
||||
state = hass.states.get('{}.{}'.format(DEVICE_TRACKER_DOMAIN,
|
||||
data['device']))
|
||||
assert state.state == STATE_HOME
|
||||
assert state.attributes['gps_accuracy'] == 123
|
||||
assert state.attributes['battery_level'] == 23
|
||||
assert state.attributes['speed'] == 23
|
||||
assert state.attributes['direction'] == 123
|
||||
assert state.attributes['altitude'] == 123
|
||||
assert state.attributes['provider'] == 'gps'
|
||||
assert state.attributes['activity'] == 'idle'
|
||||
|
||||
|
||||
@pytest.mark.xfail(
|
||||
reason='The device_tracker component does not support unloading yet.'
|
||||
|
||||
@@ -1,74 +1 @@
|
||||
"""Tests for mobile_app component."""
|
||||
# pylint: disable=redefined-outer-name,unused-import
|
||||
import pytest
|
||||
|
||||
from tests.common import mock_device_registry
|
||||
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from homeassistant.components.mobile_app.const import (DATA_BINARY_SENSOR,
|
||||
DATA_DELETED_IDS,
|
||||
DATA_SENSOR,
|
||||
DOMAIN,
|
||||
STORAGE_KEY,
|
||||
STORAGE_VERSION)
|
||||
|
||||
from .const import REGISTER, REGISTER_CLEARTEXT
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def registry(hass):
|
||||
"""Return a configured device registry."""
|
||||
return mock_device_registry(hass)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def create_registrations(authed_api_client):
|
||||
"""Return two new registrations."""
|
||||
enc_reg = await authed_api_client.post(
|
||||
'/api/mobile_app/registrations', json=REGISTER
|
||||
)
|
||||
|
||||
assert enc_reg.status == 201
|
||||
enc_reg_json = await enc_reg.json()
|
||||
|
||||
clear_reg = await authed_api_client.post(
|
||||
'/api/mobile_app/registrations', json=REGISTER_CLEARTEXT
|
||||
)
|
||||
|
||||
assert clear_reg.status == 201
|
||||
clear_reg_json = await clear_reg.json()
|
||||
|
||||
return (enc_reg_json, clear_reg_json)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def webhook_client(hass, aiohttp_client, hass_storage, hass_admin_user):
|
||||
"""mobile_app mock client."""
|
||||
hass_storage[STORAGE_KEY] = {
|
||||
'version': STORAGE_VERSION,
|
||||
'data': {
|
||||
DATA_BINARY_SENSOR: {},
|
||||
DATA_DELETED_IDS: [],
|
||||
DATA_SENSOR: {}
|
||||
}
|
||||
}
|
||||
|
||||
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
return await aiohttp_client(hass.http.app)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def authed_api_client(hass, hass_client):
|
||||
"""Provide an authenticated client for mobile_app to use."""
|
||||
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
return await hass_client()
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def setup_ws(hass):
|
||||
"""Configure the websocket_api component."""
|
||||
assert await async_setup_component(hass, 'websocket_api', {})
|
||||
await hass.async_block_till_done()
|
||||
"""Tests for the mobile app integration."""
|
||||
|
||||
tests/components/mobile_app/conftest.py (new file, 60 lines)
@@ -0,0 +1,60 @@
"""Tests for mobile_app component."""
|
||||
# pylint: disable=redefined-outer-name,unused-import
|
||||
import pytest
|
||||
|
||||
from tests.common import mock_device_registry
|
||||
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from homeassistant.components.mobile_app.const import DOMAIN
|
||||
|
||||
from .const import REGISTER, REGISTER_CLEARTEXT
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def registry(hass):
|
||||
"""Return a configured device registry."""
|
||||
return mock_device_registry(hass)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def create_registrations(authed_api_client):
|
||||
"""Return two new registrations."""
|
||||
enc_reg = await authed_api_client.post(
|
||||
'/api/mobile_app/registrations', json=REGISTER
|
||||
)
|
||||
|
||||
assert enc_reg.status == 201
|
||||
enc_reg_json = await enc_reg.json()
|
||||
|
||||
clear_reg = await authed_api_client.post(
|
||||
'/api/mobile_app/registrations', json=REGISTER_CLEARTEXT
|
||||
)
|
||||
|
||||
assert clear_reg.status == 201
|
||||
clear_reg_json = await clear_reg.json()
|
||||
|
||||
return (enc_reg_json, clear_reg_json)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def webhook_client(hass, aiohttp_client):
|
||||
"""mobile_app mock client."""
|
||||
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
return await aiohttp_client(hass.http.app)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def authed_api_client(hass, hass_client):
|
||||
"""Provide an authenticated client for mobile_app to use."""
|
||||
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
|
||||
await hass.async_block_till_done()
|
||||
return await hass_client()
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def setup_ws(hass):
|
||||
"""Configure the websocket_api component."""
|
||||
assert await async_setup_component(hass, 'websocket_api', {})
|
||||
await hass.async_block_till_done()
|
||||
tests/components/mobile_app/test_device_tracker.py (new file, 68 lines)
@@ -0,0 +1,68 @@
"""Test mobile app device tracker."""
|
||||
|
||||
|
||||
async def test_sending_location(hass, create_registrations, webhook_client):
|
||||
"""Test sending a location via a webhook."""
|
||||
resp = await webhook_client.post(
|
||||
'/api/webhook/{}'.format(create_registrations[1]['webhook_id']),
|
||||
json={
|
||||
'type': 'update_location',
|
||||
'data': {
|
||||
'gps': [10, 20],
|
||||
'gps_accuracy': 30,
|
||||
'battery': 40,
|
||||
'altitude': 50,
|
||||
'course': 60,
|
||||
'speed': 70,
|
||||
'vertical_accuracy': 80,
|
||||
'location_name': 'bar',
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
assert resp.status == 200
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get('device_tracker.test_1')
|
||||
assert state is not None
|
||||
assert state.name == 'Test 1'
|
||||
assert state.state == 'bar'
|
||||
assert state.attributes['source_type'] == 'gps'
|
||||
assert state.attributes['latitude'] == 10
|
||||
assert state.attributes['longitude'] == 20
|
||||
assert state.attributes['gps_accuracy'] == 30
|
||||
assert state.attributes['battery_level'] == 40
|
||||
assert state.attributes['altitude'] == 50
|
||||
assert state.attributes['course'] == 60
|
||||
assert state.attributes['speed'] == 70
|
||||
assert state.attributes['vertical_accuracy'] == 80
|
||||
|
||||
resp = await webhook_client.post(
|
||||
'/api/webhook/{}'.format(create_registrations[1]['webhook_id']),
|
||||
json={
|
||||
'type': 'update_location',
|
||||
'data': {
|
||||
'gps': [1, 2],
|
||||
'gps_accuracy': 3,
|
||||
'battery': 4,
|
||||
'altitude': 5,
|
||||
'course': 6,
|
||||
'speed': 7,
|
||||
'vertical_accuracy': 8,
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
assert resp.status == 200
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get('device_tracker.test_1')
|
||||
assert state is not None
|
||||
assert state.state == 'not_home'
|
||||
assert state.attributes['source_type'] == 'gps'
|
||||
assert state.attributes['latitude'] == 1
|
||||
assert state.attributes['longitude'] == 2
|
||||
assert state.attributes['gps_accuracy'] == 3
|
||||
assert state.attributes['battery_level'] == 4
|
||||
assert state.attributes['altitude'] == 5
|
||||
assert state.attributes['course'] == 6
|
||||
assert state.attributes['speed'] == 7
|
||||
assert state.attributes['vertical_accuracy'] == 8
|
||||
@@ -2,9 +2,6 @@
|
||||
# pylint: disable=redefined-outer-name,unused-import
|
||||
import logging
|
||||
|
||||
from . import (authed_api_client, create_registrations, # noqa: F401
|
||||
webhook_client) # noqa: F401
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
@@ -7,10 +7,9 @@ from homeassistant.const import CONF_WEBHOOK_ID
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from .const import REGISTER, RENDER_TEMPLATE
|
||||
from . import authed_api_client # noqa: F401
|
||||
|
||||
|
||||
async def test_registration(hass, hass_client): # noqa: F811
|
||||
async def test_registration(hass, hass_client):
|
||||
"""Test that registrations happen."""
|
||||
try:
|
||||
# pylint: disable=unused-import
|
||||
|
||||
@@ -11,17 +11,14 @@ from homeassistant.setup import async_setup_component
|
||||
|
||||
from tests.common import async_mock_service
|
||||
|
||||
from . import (authed_api_client, create_registrations, # noqa: F401
|
||||
webhook_client) # noqa: F401
|
||||
|
||||
from .const import (CALL_SERVICE, FIRE_EVENT, REGISTER_CLEARTEXT,
|
||||
RENDER_TEMPLATE, UPDATE)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def test_webhook_handle_render_template(create_registrations, # noqa: F401, F811, E501
|
||||
webhook_client): # noqa: F811
|
||||
async def test_webhook_handle_render_template(create_registrations,
|
||||
webhook_client):
|
||||
"""Test that we render templates properly."""
|
||||
resp = await webhook_client.post(
|
||||
'/api/webhook/{}'.format(create_registrations[1]['webhook_id']),
|
||||
@@ -34,7 +31,7 @@ async def test_webhook_handle_render_template(create_registrations, # noqa: F40
|
||||
assert json == {'one': 'Hello world'}
|
||||
|
||||
|
||||
async def test_webhook_handle_call_services(hass, create_registrations, # noqa: F401, F811, E501
|
||||
async def test_webhook_handle_call_services(hass, create_registrations,
|
||||
webhook_client): # noqa: E501 F811
    """Test that we call services properly."""
    calls = async_mock_service(hass, 'test', 'mobile_app')
@@ -49,8 +46,8 @@ async def test_webhook_handle_call_services(hass, create_registrations, # noqa:
    assert len(calls) == 1


async def test_webhook_handle_fire_event(hass, create_registrations,  # noqa: F401, F811, E501
                                         webhook_client):  # noqa: F811
async def test_webhook_handle_fire_event(hass, create_registrations,
                                         webhook_client):
    """Test that we can fire events."""
    events = []

@@ -76,7 +73,7 @@ async def test_webhook_handle_fire_event(hass, create_registrations, # noqa: F4

async def test_webhook_update_registration(webhook_client, hass_client):  # noqa: E501 F811
    """Test that we can update an existing registration via webhook."""
    authed_api_client = await hass_client()  # noqa: F811
    authed_api_client = await hass_client()
    register_resp = await authed_api_client.post(
        '/api/mobile_app/registrations', json=REGISTER_CLEARTEXT
    )
@@ -102,8 +99,8 @@ async def test_webhook_update_registration(webhook_client, hass_client): # noqa
    assert CONF_SECRET not in update_json


async def test_webhook_handle_get_zones(hass, create_registrations,  # noqa: F401, F811, E501
                                        webhook_client):  # noqa: F811
async def test_webhook_handle_get_zones(hass, create_registrations,
                                        webhook_client):
    """Test that we can get zones properly."""
    await async_setup_component(hass, ZONE_DOMAIN, {
        ZONE_DOMAIN: {
@@ -126,8 +123,8 @@ async def test_webhook_handle_get_zones(hass, create_registrations, # noqa: F40
    assert json[0]['entity_id'] == 'zone.home'


async def test_webhook_handle_get_config(hass, create_registrations,  # noqa: F401, F811, E501
                                         webhook_client):  # noqa: F811
async def test_webhook_handle_get_config(hass, create_registrations,
                                         webhook_client):
    """Test that we can get config properly."""
    resp = await webhook_client.post(
        '/api/webhook/{}'.format(create_registrations[1]['webhook_id']),
@@ -160,8 +157,8 @@ async def test_webhook_handle_get_config(hass, create_registrations, # noqa: F4
    assert expected_dict == json


async def test_webhook_returns_error_incorrect_json(webhook_client,  # noqa: F401, F811, E501
                                                    create_registrations,  # noqa: F401, F811, E501
async def test_webhook_returns_error_incorrect_json(webhook_client,
                                                    create_registrations,
                                                    caplog):  # noqa: E501 F811
    """Test that an error is returned when JSON is invalid."""
    resp = await webhook_client.post(
@@ -175,8 +172,8 @@ async def test_webhook_returns_error_incorrect_json(webhook_client, # noqa: F40
    assert 'invalid JSON' in caplog.text


async def test_webhook_handle_decryption(webhook_client,  # noqa: F811
                                         create_registrations):  # noqa: F401, F811, E501
async def test_webhook_handle_decryption(webhook_client,
                                         create_registrations):
    """Test that we can encrypt/decrypt properly."""
    try:
        # pylint: disable=unused-import
@@ -221,8 +218,8 @@ async def test_webhook_handle_decryption(webhook_client, # noqa: F811
    assert json.loads(decrypted_data) == {'one': 'Hello world'}


async def test_webhook_requires_encryption(webhook_client,  # noqa: F811
                                           create_registrations):  # noqa: F401, F811, E501
async def test_webhook_requires_encryption(webhook_client,
                                           create_registrations):
    """Test that encrypted registrations only accept encrypted data."""
    resp = await webhook_client.post(
        '/api/webhook/{}'.format(create_registrations[0]['webhook_id']),

@@ -5,7 +5,6 @@ from homeassistant.components.websocket_api.const import TYPE_RESULT
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.setup import async_setup_component

from . import authed_api_client, setup_ws, webhook_client  # noqa: F401
from .const import (CALL_SERVICE, REGISTER)


@@ -45,7 +44,7 @@ async def test_webocket_get_user_registrations(hass, aiohttp_client,


async def test_webocket_delete_registration(hass, hass_client,
                                            hass_ws_client, webhook_client):  # noqa: E501 F811
                                            hass_ws_client, webhook_client):
    """Test delete_registration websocket command."""
    authed_api_client = await hass_client()  # noqa: F811
    register_resp = await authed_api_client.post(
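Dropping the `# noqa: F401, F811` markers from the fixture parameters is the usual result of moving shared fixtures into a `conftest.py`, where pytest injects them by name without per-module re-imports; whether that is exactly what this refactor does is not shown in these hunks. A minimal sketch of the pattern, with placeholder fixture bodies rather than the real mobile_app fixtures:

```python
# conftest.py -- illustrative layout only; fixture bodies are placeholders.
import pytest


@pytest.fixture
def webhook_client():
    """Stand-in for the HTTP client fixture the real suite provides."""
    return object()


@pytest.fixture
def create_registrations():
    """Stand-in returning two fake registrations keyed by webhook_id."""
    return [{'webhook_id': 'encrypted-id'}, {'webhook_id': 'cleartext-id'}]


# test_webhook.py -- fixtures resolve by parameter name; no imports, no noqa.
def test_uses_fixtures(create_registrations, webhook_client):
    assert len(create_registrations) == 2
```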

@@ -1,6 +1,10 @@
"""Test the SSDP integration."""
import asyncio
from unittest.mock import patch, Mock

import aiohttp
import pytest

from homeassistant.generated import ssdp as gn_ssdp
from homeassistant.components import ssdp

@@ -76,3 +80,28 @@ async def test_scan_match_device_type(hass, aioclient_mock):
    assert len(mock_init.mock_calls) == 1
    assert mock_init.mock_calls[0][1][0] == 'mock-domain'
    assert mock_init.mock_calls[0][2]['context'] == {'source': 'ssdp'}


@pytest.mark.parametrize('exc', [asyncio.TimeoutError, aiohttp.ClientError])
async def test_scan_description_fetch_fail(hass, aioclient_mock, exc):
    """Test failing to fetch description."""
    aioclient_mock.get('http://1.1.1.1', exc=exc)
    scanner = ssdp.Scanner(hass)

    with patch('netdisco.ssdp.scan', return_value=[
            Mock(st="mock-st", location='http://1.1.1.1')
    ]):
        await scanner.async_scan(None)


async def test_scan_description_parse_fail(hass, aioclient_mock):
    """Test invalid XML."""
    aioclient_mock.get('http://1.1.1.1', text="""
<root>INVALIDXML
    """)
    scanner = ssdp.Scanner(hass)

    with patch('netdisco.ssdp.scan', return_value=[
            Mock(st="mock-st", location='http://1.1.1.1')
    ]):
        await scanner.async_scan(None)
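The two new SSDP tests point the scanner at a description URL that either raises (`asyncio.TimeoutError`, `aiohttp.ClientError`) or returns unparseable XML, and only assert that `async_scan` completes without blowing up. The same parametrize-over-exceptions idea, reduced to a self-contained sketch in which `fetch_description` is a hypothetical stand-in for the scanner's fetch step:

```python
import asyncio

import aiohttp
import pytest


async def fetch_description(get):
    """Hypothetical fetch step that must swallow network errors."""
    try:
        return await get('http://1.1.1.1')
    except (asyncio.TimeoutError, aiohttp.ClientError):
        return None


@pytest.mark.parametrize('exc', [asyncio.TimeoutError, aiohttp.ClientError])
def test_fetch_description_swallows_errors(exc):
    """Each simulated failure yields None instead of propagating."""
    async def failing_get(url):
        raise exc()

    assert asyncio.run(fetch_description(failing_get)) is None
```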

@@ -1,6 +1,7 @@
"""Test Zeroconf component setup process."""
from unittest.mock import patch

import pytest
from zeroconf import ServiceInfo, ServiceStateChange

from homeassistant.generated import zeroconf as zc_gen
@@ -8,6 +9,13 @@ from homeassistant.setup import async_setup_component
from homeassistant.components import zeroconf


@pytest.fixture
def mock_zeroconf():
    """Mock zeroconf."""
    with patch('homeassistant.components.zeroconf.Zeroconf') as mock_zc:
        yield mock_zc.return_value


def service_update_mock(zeroconf, service, handlers):
    """Call service update handler."""
    handlers[0](
@@ -23,18 +31,44 @@ def get_service_info_mock(service_type, name):
        properties={b'macaddress': b'ABCDEF012345'})


async def test_setup(hass):
def get_homekit_info_mock(service_type, name):
    """Return homekit info for get_service_info."""
    return ServiceInfo(
        service_type, name, address=b'\n\x00\x00\x14', port=80, weight=0,
        priority=0, server='name.local.',
        properties={b'md': b'LIFX Bulb'})


async def test_setup(hass, mock_zeroconf):
    """Test configured options for a device are loaded via config entry."""
    with patch.object(hass.config_entries, 'flow') as mock_config_flow, \
            patch.object(zeroconf, 'ServiceBrowser') as MockServiceBrowser, \
            patch.object(zeroconf.Zeroconf, 'get_service_info') as \
            mock_get_service_info:

        MockServiceBrowser.side_effect = service_update_mock
        mock_get_service_info.side_effect = get_service_info_mock

    with patch.object(
        hass.config_entries, 'flow'
    ) as mock_config_flow, patch.object(
        zeroconf, 'ServiceBrowser', side_effect=service_update_mock
    ) as mock_service_browser:
        mock_zeroconf.get_service_info.side_effect = get_service_info_mock
        assert await async_setup_component(
            hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})

    assert len(MockServiceBrowser.mock_calls) == len(zc_gen.ZEROCONF)
    assert len(mock_service_browser.mock_calls) == len(zc_gen.ZEROCONF)
    assert len(mock_config_flow.mock_calls) == len(zc_gen.ZEROCONF) * 2


async def test_homekit(hass, mock_zeroconf):
    """Test configured options for a device are loaded via config entry."""
    with patch.dict(
        zc_gen.ZEROCONF, {
            zeroconf.HOMEKIT_TYPE: ["homekit_controller"]
        }, clear=True
    ), patch.object(
        hass.config_entries, 'flow'
    ) as mock_config_flow, patch.object(
        zeroconf, 'ServiceBrowser', side_effect=service_update_mock
    ) as mock_service_browser:
        mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock
        assert await async_setup_component(
            hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})

    assert len(mock_service_browser.mock_calls) == 1
    assert len(mock_config_flow.mock_calls) == 2
    assert mock_config_flow.mock_calls[0][1][0] == 'lifx'
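The `mock_zeroconf` fixture patches the `Zeroconf` class and yields its `return_value`, so each test receives the exact instance the component will construct and can steer `get_service_info.side_effect` on it without repeating `patch.object` boilerplate; `test_homekit` then advertises a HomeKit service whose `md` property is `LIFX Bulb`, and the assertion expects a flow for `lifx` rather than `homekit_controller`. The fixture pattern in isolation, against a stand-in class (the `Scanner` class and helper below are illustrative, not the component's API):

```python
from unittest.mock import patch

import pytest


class Scanner:
    """Stand-in for a class the component under test instantiates itself."""

    def lookup(self, name):
        raise RuntimeError('would hit the network')


def start_component():
    """Pretend component code that creates its own Scanner instance."""
    return Scanner()


@pytest.fixture
def mock_scanner():
    """Patch the class and yield the instance the component will actually get."""
    with patch('{}.Scanner'.format(__name__)) as mock_cls:
        yield mock_cls.return_value


def test_lookup_is_controllable(mock_scanner):
    mock_scanner.lookup.return_value = {'md': 'LIFX Bulb'}
    scanner = start_component()  # receives the mocked instance
    assert scanner.lookup('any') == {'md': 'LIFX Bulb'}
```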

@@ -75,24 +75,26 @@ async def test_user_has_confirmation(hass, discovery_flow_conf):
    assert result['type'] == data_entry_flow.RESULT_TYPE_FORM


async def test_discovery_single_instance(hass, discovery_flow_conf):
    """Test we ask for confirmation via discovery."""
@pytest.mark.parametrize('source', ['discovery', 'ssdp', 'zeroconf'])
async def test_discovery_single_instance(hass, discovery_flow_conf, source):
    """Test we do not allow duplicates."""
    flow = config_entries.HANDLERS['test']()
    flow.hass = hass

    MockConfigEntry(domain='test').add_to_hass(hass)
    result = await flow.async_step_discovery({})
    result = await getattr(flow, "async_step_{}".format(source))({})

    assert result['type'] == data_entry_flow.RESULT_TYPE_ABORT
    assert result['reason'] == 'single_instance_allowed'


async def test_discovery_confirmation(hass, discovery_flow_conf):
@pytest.mark.parametrize('source', ['discovery', 'ssdp', 'zeroconf'])
async def test_discovery_confirmation(hass, discovery_flow_conf, source):
    """Test we ask for confirmation via discovery."""
    flow = config_entries.HANDLERS['test']()
    flow.hass = hass

    result = await flow.async_step_discovery({})
    result = await getattr(flow, "async_step_{}".format(source))({})

    assert result['type'] == data_entry_flow.RESULT_TYPE_FORM
    assert result['step_id'] == 'confirm'
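Parametrizing on the source name and dispatching with `getattr(flow, "async_step_{}".format(source))` lets one test body cover the `discovery`, `ssdp`, and `zeroconf` steps instead of three near-identical tests. The dispatch trick on its own, with a toy flow class (illustrative only, not the config_entries API):

```python
import asyncio

import pytest


class ToyFlow:
    """Toy flow exposing one async step per discovery source."""

    async def async_step_discovery(self, info):
        return {'type': 'form', 'source': 'discovery'}

    async def async_step_ssdp(self, info):
        return {'type': 'form', 'source': 'ssdp'}

    async def async_step_zeroconf(self, info):
        return {'type': 'form', 'source': 'zeroconf'}


@pytest.mark.parametrize('source', ['discovery', 'ssdp', 'zeroconf'])
def test_each_source_reaches_its_step(source):
    """The same getattr dispatch reaches every source-specific step."""
    flow = ToyFlow()
    step = getattr(flow, 'async_step_{}'.format(source))
    result = asyncio.run(step({}))
    assert result == {'type': 'form', 'source': source}
```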

@@ -30,9 +30,8 @@ class TestRequirements:
    @patch('homeassistant.util.package.is_docker_env', return_value=False)
    @patch('homeassistant.util.package.install_package', return_value=True)
    def test_requirement_installed_in_venv(
            self, mock_install, mock_venv, mock_denv, mock_dirname):
            self, mock_install, mock_denv, mock_venv, mock_dirname):
        """Test requirement installed in virtual environment."""
        mock_venv.return_value = True
        mock_dirname.return_value = 'ha_package_path'
        self.hass.config.skip_pip = False
        mock_integration(
@@ -42,14 +41,16 @@ class TestRequirements:
        assert 'comp' in self.hass.config.components
        assert mock_install.call_args == call(
            'package==0.0.1',
            constraints=os.path.join('ha_package_path', CONSTRAINT_FILE))
            constraints=os.path.join('ha_package_path', CONSTRAINT_FILE),
            no_cache_dir=False,
        )

    @patch('os.path.dirname')
    @patch('homeassistant.util.package.is_virtual_env', return_value=False)
    @patch('homeassistant.util.package.is_docker_env', return_value=False)
    @patch('homeassistant.util.package.install_package', return_value=True)
    def test_requirement_installed_in_deps(
            self, mock_install, mock_venv, mock_denv, mock_dirname):
            self, mock_install, mock_denv, mock_venv, mock_dirname):
        """Test requirement installed in deps directory."""
        mock_dirname.return_value = 'ha_package_path'
        self.hass.config.skip_pip = False
@@ -60,7 +61,9 @@ class TestRequirements:
        assert 'comp' in self.hass.config.components
        assert mock_install.call_args == call(
            'package==0.0.1', target=self.hass.config.path('deps'),
            constraints=os.path.join('ha_package_path', CONSTRAINT_FILE))
            constraints=os.path.join('ha_package_path', CONSTRAINT_FILE),
            no_cache_dir=False,
        )


async def test_install_existing_package(hass):
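The swap of `mock_venv` and `mock_denv` in the test signatures lines the parameters up with how stacked `@patch` decorators inject mocks: the decorator closest to the function supplies the first mock argument and the outermost supplies the last, so the parameter list must read bottom-up relative to the decorators. A small self-contained illustration:

```python
from unittest.mock import patch


@patch('os.path.dirname')    # outermost decorator -> last mock argument
@patch('os.path.basename')   # innermost decorator -> first mock argument
def show_order(mock_basename, mock_dirname):
    """Stacked @patch mocks are injected bottom-up."""
    return mock_basename, mock_dirname


first, last = show_order()
assert first is not last
```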

@@ -108,7 +111,9 @@ async def test_install_with_wheels_index(hass):
    print(mock_inst.call_args)
    assert mock_inst.call_args == call(
        'hello==1.0.0', find_links="https://wheels.hass.io/test",
        constraints=os.path.join('ha_package_path', CONSTRAINT_FILE))
        constraints=os.path.join('ha_package_path', CONSTRAINT_FILE),
        no_cache_dir=True,
    )


async def test_install_on_docker(hass):
@@ -135,4 +140,6 @@ async def test_install_on_docker(hass):
    print(mock_inst.call_args)
    assert mock_inst.call_args == call(
        'hello==1.0.0',
        constraints=os.path.join('ha_package_path', CONSTRAINT_FILE))
        constraints=os.path.join('ha_package_path', CONSTRAINT_FILE),
        no_cache_dir=True,
    )
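The updated assertions show `install_package` gaining a `no_cache_dir` keyword: `False` for plain virtualenv and deps-directory installs, `True` when a wheels index is used or when running in Docker. In pip terms the flag maps to `--no-cache-dir`; a rough sketch of how such a keyword might be threaded into a pip invocation (the signature and flag handling below are assumptions for illustration, not the actual homeassistant.util.package code):

```python
import subprocess
import sys


def install_package(package, no_cache_dir=False, find_links=None,
                    constraints=None, target=None):
    """Assumed shape of a pip wrapper; only the flag handling is the point."""
    args = [sys.executable, '-m', 'pip', 'install', '--quiet', package]
    if no_cache_dir:
        args.append('--no-cache-dir')   # skip pip's local cache entirely
    if find_links:
        args += ['--find-links', find_links]
    if constraints:
        args += ['--constraint', constraints]
    if target:
        args += ['--target', target]
    return subprocess.run(args).returncode == 0
```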