mirror of https://github.com/home-assistant/core.git
synced 2026-01-04 14:55:39 +01:00

Compare commits

31 Commits

- aa29eeba04
- fe47253f68
- 234c759b45
- ba4d4bcd29
- d7bbdb033d
- 63fd5f2d31
- c78d3b6154
- dcfe7b2280
- 6330bb1004
- 672a896124
- 567e3e6e50
- 259915eee9
- d7859b5900
- ebcae2503c
- 77ee2f1f3e
- 761385dea1
- 2b2809a4c6
- 3c32bfda95
- 65d9460e09
- 2b542b7789
- bbdb7a6f4c
- e049b35413
- cdcc535ae1
- 4662ab215c
- 80aa2075c6
- 4b7d944a74
- b7218e6a1d
- 6d0ac30687
- 0ceace96e7
- ec7f2657cd
- 5945929e7e
@@ -18,6 +18,7 @@ from homeassistant.util.logging import AsyncHandler
 from homeassistant.util.package import async_get_user_site, is_virtual_env
 from homeassistant.util.yaml import clear_secret_cache
 from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import config_validation as cv

 _LOGGER = logging.getLogger(__name__)

@@ -153,6 +154,34 @@ async def async_from_config_dict(config: Dict[str, Any],
     stop = time()
     _LOGGER.info("Home Assistant initialized in %.2fs", stop-start)

+    # TEMP: warn users for invalid slugs
+    # Remove after 0.94 or 1.0
+    if cv.INVALID_SLUGS_FOUND or cv.INVALID_ENTITY_IDS_FOUND:
+        msg = []
+
+        if cv.INVALID_ENTITY_IDS_FOUND:
+            msg.append(
+                "Your configuration contains invalid entity ID references. "
+                "Please find and update the following. "
+                "This will become a breaking change."
+            )
+            msg.append('\n'.join('- {} -> {}'.format(*item)
+                                 for item
+                                 in cv.INVALID_ENTITY_IDS_FOUND.items()))
+
+        if cv.INVALID_SLUGS_FOUND:
+            msg.append(
+                "Your configuration contains invalid slugs. "
+                "Please find and update the following. "
+                "This will become a breaking change."
+            )
+            msg.append('\n'.join('- {} -> {}'.format(*item)
+                                 for item in cv.INVALID_SLUGS_FOUND.items()))
+
+        hass.components.persistent_notification.async_create(
+            '\n\n'.join(msg), "Config Warning", "config_warning"
+        )
+
     return hass

@@ -46,9 +46,7 @@ ALERT_SCHEMA = vol.Schema({
     vol.Required(CONF_NOTIFIERS): cv.ensure_list})

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: ALERT_SCHEMA,
-    }),
+    DOMAIN: cv.schema_with_slug_keys(ALERT_SCHEMA),
 }, extra=vol.ALLOW_EXTRA)

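Most of the schema changes in this comparison follow the pattern shown above: a mapping validated as `vol.Schema({cv.slug: VALUE_SCHEMA})` becomes `cv.schema_with_slug_keys(VALUE_SCHEMA)`, which first checks that every key is a slug and then applies the value schema, and which during the deprecation window only warns about legacy slugs (see the `config_validation.py` hunks further down). A minimal sketch of the two forms; the value schema and config used here are illustrative, not taken from the diff:

```python
import voluptuous as vol
import homeassistant.helpers.config_validation as cv

# Hypothetical per-entry schema, standing in for ALERT_SCHEMA and friends.
VALUE_SCHEMA = vol.Schema({vol.Optional('name'): cv.string})

# Old form: each key is validated by cv.slug inside the dict schema.
OLD = vol.Schema({cv.slug: VALUE_SCHEMA})

# New form: one callable verifies the keys are slugs, then applies
# VALUE_SCHEMA to every value.
NEW = vol.Schema(cv.schema_with_slug_keys(VALUE_SCHEMA))

conf = {'living_room': {'name': 'Living room'}}
assert OLD(conf) == NEW(conf) == conf
```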
@@ -14,7 +14,7 @@ from homeassistant.const import (
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.discovery import async_load_platform

-REQUIREMENTS = ['aioasuswrt==1.1.17']
+REQUIREMENTS = ['aioasuswrt==1.1.18']

 _LOGGER = logging.getLogger(__name__)

@@ -50,9 +50,7 @@ DEVICE_SCHEMA = vol.Schema({
 })

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: DEVICE_SCHEMA,
-    }),
+    DOMAIN: cv.schema_with_slug_keys(DEVICE_SCHEMA),
 }, extra=vol.ALLOW_EXTRA)

 SERVICE_VAPIX_CALL = 'vapix_call'

@@ -41,7 +41,7 @@ SENSOR_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_SENSORS): vol.Schema({cv.slug: SENSOR_SCHEMA}),
+    vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(SENSOR_SCHEMA),
 })

@@ -51,7 +51,7 @@ SENSOR_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_SENSORS): vol.Schema({cv.slug: SENSOR_SCHEMA}),
+    vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(SENSOR_SCHEMA),
 })

@@ -15,7 +15,7 @@ from homeassistant.const import (
     CONF_BINARY_SENSORS, CONF_SENSORS, CONF_FILENAME,
     CONF_MONITORED_CONDITIONS, TEMP_FAHRENHEIT)

-REQUIREMENTS = ['blinkpy==0.11.1']
+REQUIREMENTS = ['blinkpy==0.11.2']

 _LOGGER = logging.getLogger(__name__)

@@ -37,8 +37,8 @@ SERVICE_SCHEMA = vol.Schema({
 })

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: vol.Any({
+    DOMAIN: cv.schema_with_slug_keys(
+        vol.Any({
             vol.Optional(CONF_ICON): cv.icon,
             vol.Optional(CONF_INITIAL, default=DEFAULT_INITIAL):
                 cv.positive_int,
@@ -46,7 +46,7 @@ CONFIG_SCHEMA = vol.Schema({
             vol.Optional(CONF_RESTORE, default=True): cv.boolean,
             vol.Optional(CONF_STEP, default=DEFAULT_STEP): cv.positive_int,
         }, None)
-    })
+    )
 }, extra=vol.ALLOW_EXTRA)

@@ -27,7 +27,7 @@ COVER_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_COVERS): vol.Schema({cv.slug: COVER_SCHEMA}),
+    vol.Required(CONF_COVERS): cv.schema_with_slug_keys(COVER_SCHEMA),
 })

@@ -47,7 +47,7 @@ COVER_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_COVERS): vol.Schema({cv.slug: COVER_SCHEMA}),
+    vol.Required(CONF_COVERS): cv.schema_with_slug_keys(COVER_SCHEMA),
 })

@@ -46,7 +46,7 @@ COVER_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_COVERS): vol.Schema({cv.slug: COVER_SCHEMA}),
+    vol.Required(CONF_COVERS): cv.schema_with_slug_keys(COVER_SCHEMA),
 })

@@ -18,7 +18,8 @@ _LOGGER = logging.getLogger(__name__)
 DEPENDENCIES = ['scsgate']

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_DEVICES): vol.Schema({cv.slug: scsgate.SCSGATE_SCHEMA}),
+    vol.Required(CONF_DEVICES):
+        cv.schema_with_slug_keys(scsgate.SCSGATE_SCHEMA),
 })

@@ -67,7 +67,7 @@ COVER_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_COVERS): vol.Schema({cv.slug: COVER_SCHEMA}),
+    vol.Required(CONF_COVERS): cv.schema_with_slug_keys(COVER_SCHEMA),
 })

@@ -26,7 +26,7 @@ COVER_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_COVERS): vol.Schema({cv.slug: COVER_SCHEMA}),
+    vol.Required(CONF_COVERS): cv.schema_with_slug_keys(COVER_SCHEMA),
 })

 DEPENDENCIES = ['velbus']

@@ -15,7 +15,7 @@ import homeassistant.util.dt as dt_util

 _LOGGER = logging.getLogger(__name__)

-REQUIREMENTS = ['pygatt==3.2.0']
+REQUIREMENTS = ['pygatt[GATTTOOL]==3.2.0']

 BLE_PREFIX = 'BLE_'
 MIN_SEEN_NEW = 5

@@ -19,7 +19,7 @@ from homeassistant.helpers.event import track_time_interval
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util import slugify, dt as dt_util

-REQUIREMENTS = ['locationsharinglib==3.0.9']
+REQUIREMENTS = ['locationsharinglib==3.0.11']

 _LOGGER = logging.getLogger(__name__)

@@ -62,7 +62,7 @@ FAN_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_FANS): vol.Schema({cv.slug: FAN_SCHEMA}),
+    vol.Required(CONF_FANS): cv.schema_with_slug_keys(FAN_SCHEMA),
 })

@@ -111,7 +111,7 @@ ATTR_TRANS_LEVEL = 'trans_level'
 ATTR_HARDWARE_VERSION = 'hardware_version'

 # Air Humidifier CA
-ATTR_SPEED = 'speed'
+ATTR_MOTOR_SPEED = 'motor_speed'
 ATTR_DEPTH = 'depth'
 ATTR_DRY = 'dry'

@@ -223,7 +223,7 @@ AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER = {

 AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER_CA = {
     **AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER_COMMON,
-    ATTR_SPEED: 'speed',
+    ATTR_MOTOR_SPEED: 'speed',
     ATTR_DEPTH: 'depth',
     ATTR_DRY: 'dry',
 }

@@ -24,7 +24,7 @@ from homeassistant.core import callback
 from homeassistant.helpers.translation import async_get_translations
 from homeassistant.loader import bind_hass

-REQUIREMENTS = ['home-assistant-frontend==20190121.0']
+REQUIREMENTS = ['home-assistant-frontend==20190121.1']

 DOMAIN = 'frontend'
 DEPENDENCIES = ['api', 'websocket_api', 'http', 'system_log',

@@ -34,7 +34,7 @@ GRAPH_SCHEMA = vol.Schema({


 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({cv.slug: GRAPH_SCHEMA})
+    DOMAIN: cv.schema_with_slug_keys(GRAPH_SCHEMA)
 }, extra=vol.ALLOW_EXTRA)

@@ -41,6 +41,7 @@ SUPPORT_HUE = {
 }

 ATTR_IS_HUE_GROUP = 'is_hue_group'
+GAMUT_TYPE_UNAVAILABLE = 'None'
 # Minimum Hue Bridge API version to support groups
 # 1.4.0 introduced extended group info
 # 1.12 introduced the state object for groups

@@ -221,16 +222,23 @@ class HueLight(Light):
         if is_group:
             self.is_osram = False
             self.is_philips = False
-            self.gamut_typ = 'None'
+            self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
             self.gamut = None
         else:
             self.is_osram = light.manufacturername == 'OSRAM'
             self.is_philips = light.manufacturername == 'Philips'
             self.gamut_typ = self.light.colorgamuttype
             self.gamut = self.light.colorgamut
-            if not self.gamut:
-                err_msg = 'Can not get color gamut of light "%s"'
-                _LOGGER.warning(err_msg, self.name)
+            _LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))
+            if self.gamut:
+                if not color.check_valid_gamut(self.gamut):
+                    err = "Please check for software updates of the bridge " \
+                          "and/or bulb in the Philips Hue App, " \
+                          "Color gamut of %s: %s, not valid, " \
+                          "setting gamut to None."
+                    _LOGGER.warning(err, self.name, str(self.gamut))
+                    self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
+                    self.gamut = None

     @property
     def unique_id(self):

@@ -30,13 +30,13 @@ SERVICE_SCHEMA = vol.Schema({
 })

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: vol.Any({
+    DOMAIN: cv.schema_with_slug_keys(
+        vol.Any({
             vol.Optional(CONF_NAME): cv.string,
             vol.Optional(CONF_INITIAL): cv.boolean,
             vol.Optional(CONF_ICON): cv.icon,
         }, None)
-    })
+    )
 }, extra=vol.ALLOW_EXTRA)

@@ -46,14 +46,15 @@ def has_date_or_time(conf):


 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: vol.All({
+    DOMAIN: cv.schema_with_slug_keys(
+        vol.All({
             vol.Optional(CONF_NAME): cv.string,
             vol.Optional(CONF_HAS_DATE, default=False): cv.boolean,
             vol.Optional(CONF_HAS_TIME, default=False): cv.boolean,
             vol.Optional(CONF_ICON): cv.icon,
             vol.Optional(CONF_INITIAL): cv.string,
-        }, has_date_or_time)})
+        }, has_date_or_time)
+    )
 }, extra=vol.ALLOW_EXTRA)

@@ -63,8 +63,8 @@ def _cv_input_number(cfg):


 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: vol.All({
+    DOMAIN: cv.schema_with_slug_keys(
+        vol.All({
             vol.Optional(CONF_NAME): cv.string,
             vol.Required(CONF_MIN): vol.Coerce(float),
             vol.Required(CONF_MAX): vol.Coerce(float),
@@ -76,7 +76,7 @@ CONFIG_SCHEMA = vol.Schema({
             vol.Optional(CONF_MODE, default=MODE_SLIDER):
                 vol.In([MODE_BOX, MODE_SLIDER]),
         }, _cv_input_number)
-    })
+    )
 }, required=True, extra=vol.ALLOW_EXTRA)

@@ -64,14 +64,15 @@ def _cv_input_select(cfg):


 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: vol.All({
+    DOMAIN: cv.schema_with_slug_keys(
+        vol.All({
             vol.Optional(CONF_NAME): cv.string,
             vol.Required(CONF_OPTIONS):
                 vol.All(cv.ensure_list, vol.Length(min=1), [cv.string]),
             vol.Optional(CONF_INITIAL): cv.string,
             vol.Optional(CONF_ICON): cv.icon,
-        }, _cv_input_select)})
+        }, _cv_input_select)
+    )
 }, required=True, extra=vol.ALLOW_EXTRA)

@@ -55,8 +55,8 @@ def _cv_input_text(cfg):


 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: vol.All({
+    DOMAIN: cv.schema_with_slug_keys(
+        vol.All({
             vol.Optional(CONF_NAME): cv.string,
             vol.Optional(CONF_MIN, default=0): vol.Coerce(int),
             vol.Optional(CONF_MAX, default=100): vol.Coerce(int),
@@ -67,7 +67,7 @@ CONFIG_SCHEMA = vol.Schema({
             vol.Optional(CONF_MODE, default=MODE_TEXT):
                 vol.In([MODE_TEXT, MODE_PASSWORD]),
         }, _cv_input_text)
-    })
+    )
 }, required=True, extra=vol.ALLOW_EXTRA)

@@ -19,7 +19,8 @@ _LOGGER = logging.getLogger(__name__)
 DEPENDENCIES = ['scsgate']

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_DEVICES): vol.Schema({cv.slug: scsgate.SCSGATE_SCHEMA}),
+    vol.Required(CONF_DEVICES):
+        cv.schema_with_slug_keys(scsgate.SCSGATE_SCHEMA),
 })

@@ -44,7 +44,7 @@ LIGHT_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_LIGHTS): vol.Schema({cv.slug: LIGHT_SCHEMA}),
+    vol.Required(CONF_LIGHTS): cv.schema_with_slug_keys(LIGHT_SCHEMA),
 })

@@ -101,6 +101,8 @@ class LovelaceStorage:

     async def async_save(self, config):
         """Save config."""
+        if self._data is None:
+            self._data = {'config': None}
         self._data['config'] = config
         await self._store.async_save(self._data)

@@ -47,8 +47,8 @@ CONF_SERVICE_DATA = 'service_data'

 OFF_STATES = [STATE_IDLE, STATE_OFF, STATE_UNAVAILABLE]

-ATTRS_SCHEMA = vol.Schema({cv.slug: cv.string})
-CMD_SCHEMA = vol.Schema({cv.slug: cv.SERVICE_SCHEMA})
+ATTRS_SCHEMA = cv.schema_with_slug_keys(cv.string)
+CMD_SCHEMA = cv.schema_with_slug_keys(cv.SERVICE_SCHEMA)

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
     vol.Required(CONF_NAME): cv.string,

@@ -92,6 +92,10 @@ def setup(hass, config):

     discovery.listen(hass, SERVICE_OCTOPRINT, device_discovered)

+    if DOMAIN not in config:
+        # Skip the setup if there is no configuration present
+        return True
+
     for printer in config[DOMAIN]:
         name = printer[CONF_NAME]
         ssl = 's' if printer[CONF_SSL] else ''

@@ -19,8 +19,8 @@ CONF_RELATIVE_URL_ERROR_MSG = "Invalid relative URL. Absolute path required."
 CONF_RELATIVE_URL_REGEX = r'\A/'

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: {
+    DOMAIN: cv.schema_with_slug_keys(
+        vol.Schema({
             # pylint: disable=no-value-for-parameter
             vol.Optional(CONF_TITLE): cv.string,
             vol.Optional(CONF_ICON): cv.icon,
@@ -29,7 +29,9 @@ CONFIG_SCHEMA = vol.Schema({
                     CONF_RELATIVE_URL_REGEX,
                     msg=CONF_RELATIVE_URL_ERROR_MSG),
                 vol.Url()),
-        }})}, extra=vol.ALLOW_EXTRA)
+        })
+    )
+}, extra=vol.ALLOW_EXTRA)


 async def async_setup(hass, config):

@@ -49,9 +49,7 @@ ZONE_SCHEMA = vol.Schema({
 })

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: ZONE_SCHEMA,
-    }),
+    DOMAIN: cv.schema_with_slug_keys(ZONE_SCHEMA),
 }, extra=vol.ALLOW_EXTRA)

@@ -131,6 +131,9 @@ class States(Base): # type: ignore
                 _process_timestamp(self.last_changed),
                 _process_timestamp(self.last_updated),
                 context=context,
+                # Temp, because database can still store invalid entity IDs
+                # Remove with 1.0 or in 2020.
+                temp_invalid_id_bypass=True
             )
         except ValueError:
             # When json.loads fails

@@ -57,7 +57,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
     vol.Optional(ATTR_HIDDEN, default=True): cv.boolean,
     vol.Required(CONF_TOKEN): vol.All(str, vol.Length(min=32, max=32)),
     vol.Optional(CONF_COMMANDS, default={}):
-        vol.Schema({cv.slug: COMMAND_SCHEMA}),
+        cv.schema_with_slug_keys(COMMAND_SCHEMA),
 }, extra=vol.ALLOW_EXTRA)

@@ -47,9 +47,7 @@ COMMAND_SCHEMA = vol.Schema({
 })

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: COMMAND_SCHEMA,
-    }),
+    DOMAIN: cv.schema_with_slug_keys(COMMAND_SCHEMA),
 }, extra=vol.ALLOW_EXTRA)

@@ -79,8 +79,7 @@ class GttSensor(Entity):
     def update(self):
         """Update device state."""
         self.data.get_data()
-        next_time = datetime.strptime(
-            self.data.state_bus['time'][0]['run'], "%H:%M")
+        next_time = get_datetime(self.data.state_bus)
         self._state = next_time.isoformat()

@@ -99,8 +98,7 @@ class GttData:
     def get_data(self):
         """Get the data from the api."""
         self.bus_list = self._pygtt.get_by_stop(self._stop)
-        self.bus_list.sort(key=lambda b:
-                           datetime.strptime(b['time'][0]['run'], "%H:%M"))
+        self.bus_list.sort(key=get_datetime)

         if self._bus_name is not None:
             self.state_bus = self.get_bus_by_name()

@@ -113,3 +111,13 @@ class GttData:
         for bus in self.bus_list:
             if bus['bus_name'] == self._bus_name:
                 return bus
+
+
+def get_datetime(bus):
+    """Get the datetime from a bus."""
+    bustime = datetime.strptime(bus['time'][0]['run'], "%H:%M")
+    now = datetime.now()
+    bustime = bustime.replace(year=now.year, month=now.month, day=now.day)
+    if bustime < now:
+        bustime = bustime + timedelta(days=1)
+    return bustime

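The sort-key change above matters because "%H:%M" strings carry no date: parsed naively, a departure just after midnight sorts before one late in the evening. The new `get_datetime` helper pins the parsed time to today and rolls times already in the past over to tomorrow. A small self-contained sketch of that behaviour, using made-up bus entries rather than real pygtt data:

```python
from datetime import datetime, timedelta


def get_datetime(bus):
    """Get the datetime from a bus (same helper as in the diff)."""
    bustime = datetime.strptime(bus['time'][0]['run'], "%H:%M")
    now = datetime.now()
    bustime = bustime.replace(year=now.year, month=now.month, day=now.day)
    if bustime < now:
        bustime = bustime + timedelta(days=1)
    return bustime


# Two illustrative entries: one late tonight, one just after midnight.
buses = [{'bus_name': '58', 'time': [{'run': '00:15'}]},
         {'bus_name': '10', 'time': [{'run': '23:50'}]}]
buses.sort(key=get_datetime)
print([b['bus_name'] for b in buses])  # ['10', '58'] while 23:50 has not passed yet
```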
@@ -45,7 +45,7 @@ SENSOR_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_SENSORS): vol.Schema({cv.slug: SENSOR_SCHEMA}),
+    vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(SENSOR_SCHEMA),
     vol.Optional(CONF_BAUD, default=DEFAULT_BAUD): cv.string,
     vol.Optional(CONF_DATARATE): cv.positive_int,
     vol.Optional(CONF_DEVICE, default=DEFAULT_DEVICE): cv.string,

@@ -16,7 +16,7 @@ from homeassistant.const import (
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity import Entity

-REQUIREMENTS = ['pygatt==3.2.0']
+REQUIREMENTS = ['pygatt[GATTTOOL]==3.2.0']

 _LOGGER = logging.getLogger(__name__)

@@ -68,9 +68,9 @@ PLATFORM_SCHEMA = vol.All(PLATFORM_SCHEMA.extend({
     vol.Required(CONF_PASSWORD): cv.string,
     vol.Optional(CONF_GROUP, default=GROUPS[0]): vol.In(GROUPS),
     vol.Optional(CONF_SENSORS, default={}):
-        vol.Schema({cv.slug: cv.ensure_list}),
+        cv.schema_with_slug_keys(cv.ensure_list),
     vol.Optional(CONF_CUSTOM, default={}):
-        vol.Schema({cv.slug: CUSTOM_SCHEMA}),
+        cv.schema_with_slug_keys(CUSTOM_SCHEMA),
 }, extra=vol.PREVENT_EXTRA), _check_sensor_schema)

@@ -36,7 +36,7 @@ SENSOR_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_SENSORS): vol.Schema({cv.slug: SENSOR_SCHEMA}),
+    vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(SENSOR_SCHEMA),
 })

@@ -21,9 +21,7 @@ DOMAIN = 'shell_command'
 _LOGGER = logging.getLogger(__name__)

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: cv.string,
-    }),
+    DOMAIN: cv.schema_with_slug_keys(cv.string),
 }, extra=vol.ALLOW_EXTRA)

@@ -59,7 +59,7 @@ MP1_SWITCH_SLOT_SCHEMA = vol.Schema({

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
     vol.Optional(CONF_SWITCHES, default={}):
-        vol.Schema({cv.slug: SWITCH_SCHEMA}),
+        cv.schema_with_slug_keys(SWITCH_SCHEMA),
     vol.Optional(CONF_SLOTS, default={}): MP1_SWITCH_SLOT_SCHEMA,
     vol.Required(CONF_HOST): cv.string,
     vol.Required(CONF_MAC): cv.string,

@@ -28,7 +28,7 @@ SWITCH_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_SWITCHES): vol.Schema({cv.slug: SWITCH_SCHEMA}),
+    vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA),
 })

@@ -30,7 +30,7 @@ SWITCH_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_SWITCHES): vol.Schema({cv.slug: SWITCH_SCHEMA}),
+    vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA),
 })

@@ -24,7 +24,8 @@ CONF_SCENARIO = 'scenario'
 CONF_SCS_ID = 'scs_id'

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_DEVICES): vol.Schema({cv.slug: scsgate.SCSGATE_SCHEMA}),
+    vol.Required(CONF_DEVICES):
+        cv.schema_with_slug_keys(scsgate.SCSGATE_SCHEMA),
 })

@@ -32,7 +32,7 @@ SWITCH_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_SWITCHES): vol.Schema({cv.slug: SWITCH_SCHEMA}),
+    vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA),
 })

 SCAN_INTERVAL = timedelta(seconds=10)

@@ -38,7 +38,7 @@ SWITCH_SCHEMA = vol.Schema({
 })

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_SWITCHES): vol.Schema({cv.slug: SWITCH_SCHEMA}),
+    vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA),
 })

@@ -116,10 +116,17 @@ class TelldusLiveEntity(Entity):
     def device_info(self):
         """Return device info."""
         device = self._client.device_info(self.device.device_id)
-        return {
+        device_info = {
             'identifiers': {('tellduslive', self.device.device_id)},
             'name': self.device.name,
-            'model': device['model'].title(),
-            'manufacturer': device['protocol'].title(),
-            'via_hub': ('tellduslive', device.get('client')),
         }
+        model = device.get('model')
+        if model is not None:
+            device_info['model'] = model.title()
+        protocol = device.get('protocol')
+        if protocol is not None:
+            device_info['manufacturer'] = protocol.title()
+        client = device.get('client')
+        if client is not None:
+            device_info['via_hub'] = ('tellduslive', client)
+        return device_info

@@ -53,14 +53,14 @@ SERVICE_SCHEMA_DURATION = vol.Schema({
 })

 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: vol.Schema({
-        cv.slug: vol.Any({
+    DOMAIN: cv.schema_with_slug_keys(
+        vol.Any({
             vol.Optional(CONF_NAME): cv.string,
             vol.Optional(CONF_ICON): cv.icon,
             vol.Optional(CONF_DURATION, timedelta(DEFAULT_DURATION)):
                 cv.time_period,
         }, None)
-    })
+    )
 }, extra=vol.ALLOW_EXTRA)

@@ -93,8 +93,8 @@ CONFIG_SCHEMA = vol.Schema({
         vol.Required(CONF_PASSWORD): cv.string,
         vol.Optional(CONF_UPDATE_INTERVAL, default=DEFAULT_UPDATE_INTERVAL): (
             vol.All(cv.time_period, vol.Clamp(min=MIN_UPDATE_INTERVAL))),
-        vol.Optional(CONF_NAME, default={}): vol.Schema(
-            {cv.slug: cv.string}),
+        vol.Optional(CONF_NAME, default={}):
+            cv.schema_with_slug_keys(cv.string),
         vol.Optional(CONF_RESOURCES): vol.All(
             cv.ensure_list, [vol.In(RESOURCES)]),
         vol.Optional(CONF_REGION): cv.string,

@@ -116,6 +116,9 @@ class IPMAWeather(WeatherEntity):
     @property
     def condition(self):
         """Return the current condition."""
+        if not self._forecast:
+            return
+
         return next((k for k, v in CONDITION_CLASSES.items()
                      if self._forecast[0].idWeatherType in v), None)

@@ -2,7 +2,7 @@
 """Constants used by Home Assistant components."""
 MAJOR_VERSION = 0
 MINOR_VERSION = 86
-PATCH_VERSION = '0b2'
+PATCH_VERSION = '4'
 __short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION)
 __version__ = '{}.{}'.format(__short_version__, PATCH_VERSION)
 REQUIRED_PYTHON_VER = (3, 5, 3)

@@ -663,11 +663,14 @@ class State:
                  attributes: Optional[Dict] = None,
                  last_changed: Optional[datetime.datetime] = None,
                  last_updated: Optional[datetime.datetime] = None,
-                 context: Optional[Context] = None) -> None:
+                 context: Optional[Context] = None,
+                 # Temp, because database can still store invalid entity IDs
+                 # Remove with 1.0 or in 2020.
+                 temp_invalid_id_bypass: Optional[bool] = False) -> None:
         """Initialize a new state."""
         state = str(state)

-        if not valid_entity_id(entity_id):
+        if not valid_entity_id(entity_id) and not temp_invalid_id_bypass:
             raise InvalidEntityFormatError((
                 "Invalid entity id encountered: {}. "
                 "Format should be <domain>.<object_id>").format(entity_id))

@@ -26,6 +26,13 @@ from homeassistant.helpers import template as template_helper
 # pylint: disable=invalid-name

 TIME_PERIOD_ERROR = "offset {} should be format 'HH:MM' or 'HH:MM:SS'"
+OLD_SLUG_VALIDATION = r'^[a-z0-9_]+$'
+OLD_ENTITY_ID_VALIDATION = r"^(\w+)\.(\w+)$"
+# Keep track of invalid slugs and entity ids found so we can create a
+# persistent notification. Rare temporary exception to use a global.
+INVALID_SLUGS_FOUND = {}
+INVALID_ENTITY_IDS_FOUND = {}
+

 # Home Assistant types
 byte = vol.All(vol.Coerce(int), vol.Range(min=0, max=255))

@@ -149,6 +156,18 @@ def entity_id(value: Any) -> str:
     value = string(value).lower()
     if valid_entity_id(value):
         return value
+    if re.match(OLD_ENTITY_ID_VALIDATION, value):
+        # To ease the breaking change, we allow old slugs for now
+        # Remove after 0.94 or 1.0
+        fixed = '.'.join(util_slugify(part) for part in value.split('.', 1))
+        INVALID_ENTITY_IDS_FOUND[value] = fixed
+        logging.getLogger(__name__).warning(
+            "Found invalid entity_id %s, please update with %s. This "
+            "will become a breaking change.",
+            value, fixed
+        )
+        return value
+
     raise vol.Invalid('Entity ID {} is an invalid entity id'.format(value))

@@ -329,8 +348,26 @@ def schema_with_slug_keys(value_schema: Union[T, Callable]) -> Callable:

     def verify(value: Dict) -> Dict:
         """Validate all keys are slugs and then the value_schema."""
         if not isinstance(value, dict):
             raise vol.Invalid('expected dictionary')

         for key in value.keys():
-            slug(key)
+            try:
+                slug(key)
+            except vol.Invalid:
+                # To ease the breaking change, we allow old slugs for now
+                # Remove after 0.94 or 1.0
+                if re.match(OLD_SLUG_VALIDATION, key):
+                    fixed = util_slugify(key)
+                    INVALID_SLUGS_FOUND[key] = fixed
+                    logging.getLogger(__name__).warning(
+                        "Found invalid slug %s, please update with %s. This "
+                        "will be come a breaking change.",
+                        key, fixed
+                    )
+                else:
+                    raise
+
         return schema(value)
     return verify

@@ -122,8 +122,15 @@ class EntityRegistry:
         entity_id = self.async_get_entity_id(domain, platform, unique_id)
         if entity_id:
             return self._async_update_entity(
-                entity_id, config_entry_id=config_entry_id,
-                device_id=device_id)
+                entity_id,
+                config_entry_id=config_entry_id,
+                device_id=device_id,
+                # When we changed our slugify algorithm, we invalidated some
+                # stored entity IDs with either a __ or ending in _.
+                # Fix introduced in 0.86 (Jan 23, 2018). Next line can be
+                # removed when we release 1.0 or in 2019.
+                new_entity_id='.'.join(slugify(part) for part
+                                       in entity_id.split('.', 1)))

         entity_id = self.async_generate_entity_id(
             domain, suggested_object_id or '{}_{}'.format(platform, unique_id),

@@ -4,7 +4,8 @@ import logging
 from datetime import timedelta, datetime
 from typing import Any, Dict, List, Set, Optional  # noqa pylint_disable=unused-import

-from homeassistant.core import HomeAssistant, callback, State, CoreState
+from homeassistant.core import (
+    HomeAssistant, callback, State, CoreState, valid_entity_id)
 from homeassistant.const import (
     EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP)
 import homeassistant.util.dt as dt_util

@@ -80,7 +81,8 @@ class RestoreStateData():
         else:
             data.last_states = {
                 item['state']['entity_id']: StoredState.from_dict(item)
-                for item in stored_states}
+                for item in stored_states
+                if valid_entity_id(item['state']['entity_id'])}
             _LOGGER.debug(
                 'Created cache with %s', list(data.last_states))

@@ -600,3 +600,21 @@ def check_point_in_lamps_reach(p: Tuple[float, float],
     t = cross_product(v1, q) / cross_product(v1, v2)

     return (s >= 0.0) and (t >= 0.0) and (s + t <= 1.0)
+
+
+def check_valid_gamut(Gamut: GamutType) -> bool:
+    """Check if the supplied gamut is valid."""
+    # Check if the three points of the supplied gamut are not on the same line.
+    v1 = XYPoint(Gamut.green.x - Gamut.red.x, Gamut.green.y - Gamut.red.y)
+    v2 = XYPoint(Gamut.blue.x - Gamut.red.x, Gamut.blue.y - Gamut.red.y)
+    not_on_line = cross_product(v1, v2) > 0.0001
+
+    # Check if all six coordinates of the gamut lie between 0 and 1.
+    red_valid = Gamut.red.x >= 0 and Gamut.red.x <= 1 and \
+        Gamut.red.y >= 0 and Gamut.red.y <= 1
+    green_valid = Gamut.green.x >= 0 and Gamut.green.x <= 1 and \
+        Gamut.green.y >= 0 and Gamut.green.y <= 1
+    blue_valid = Gamut.blue.x >= 0 and Gamut.blue.x <= 1 and \
+        Gamut.blue.y >= 0 and Gamut.blue.y <= 1
+
+    return not_on_line and red_valid and green_valid and blue_valid

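The collinearity part of `check_valid_gamut` works because the cross product of the red-to-green and red-to-blue vectors equals twice the signed area of the gamut triangle, so points on a single line (or a single point) give a value of roughly zero. A standalone sketch of that test with plain tuples instead of the `XYPoint`/`GamutType` helpers; the coordinates reuse the valid and invalid gamuts from the tests further down:

```python
def cross_product(p1, p2):
    """z component of the 2D cross product (mirrors the helper in color.py)."""
    return p1[0] * p2[1] - p1[1] * p2[0]


def spans_triangle(red, green, blue):
    """True when the three gamut points are not (nearly) collinear."""
    v1 = (green[0] - red[0], green[1] - red[1])
    v2 = (blue[0] - red[0], blue[1] - red[1])
    return cross_product(v1, v2) > 0.0001


# Philips-style gamut: a real triangle.
print(spans_triangle((0.704, 0.296), (0.2151, 0.7106), (0.138, 0.08)))  # True
# GAMUT_INVALID_4 from the tests: three points on one line, area ~ 0.
print(spans_triangle((0.1, 0.1), (0.3, 0.3), (0.7, 0.7)))  # False
```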
@@ -87,7 +87,7 @@ abodepy==0.15.0
 afsapi==0.0.4

 # homeassistant.components.asuswrt
-aioasuswrt==1.1.17
+aioasuswrt==1.1.18

 # homeassistant.components.device_tracker.automatic
 aioautomatic==0.6.5

@@ -196,7 +196,7 @@ bellows==0.7.0
 bimmer_connected==0.5.3

 # homeassistant.components.blink
-blinkpy==0.11.1
+blinkpy==0.11.2

 # homeassistant.components.light.blinksticklight
 blinkstick==1.1.8

@@ -514,7 +514,7 @@ hole==0.3.0
 holidays==0.9.9

 # homeassistant.components.frontend
-home-assistant-frontend==20190121.0
+home-assistant-frontend==20190121.1

 # homeassistant.components.zwave
 homeassistant-pyozw==0.1.2

@@ -625,7 +625,7 @@ liveboxplaytv==2.0.2
 lmnotify==0.0.4

 # homeassistant.components.device_tracker.google_maps
-locationsharinglib==3.0.9
+locationsharinglib==3.0.11

 # homeassistant.components.logi_circle
 logi_circle==0.1.7

@@ -1003,7 +1003,7 @@ pyfttt==0.3

 # homeassistant.components.device_tracker.bluetooth_le_tracker
 # homeassistant.components.sensor.skybeacon
-pygatt==3.2.0
+pygatt[GATTTOOL]==3.2.0

 # homeassistant.components.cover.gogogate2
 pygogogate2==0.1.1

@@ -107,7 +107,7 @@ hdate==0.8.7
 holidays==0.9.9

 # homeassistant.components.frontend
-home-assistant-frontend==20190121.0
+home-assistant-frontend==20190121.1

 # homeassistant.components.homematicip_cloud
 homematicip==0.10.3

@@ -701,7 +701,9 @@ def test_available():
     """Test available property."""
     light = hue_light.HueLight(
         light=Mock(state={'reachable': False},
-                   raw=LIGHT_RAW),
+                   raw=LIGHT_RAW,
+                   colorgamuttype=LIGHT_GAMUT_TYPE,
+                   colorgamut=LIGHT_GAMUT),
         request_bridge_update=None,
         bridge=Mock(allow_unreachable=False),
         is_group=False,

@@ -711,7 +713,9 @@ def test_available():

     light = hue_light.HueLight(
         light=Mock(state={'reachable': False},
-                   raw=LIGHT_RAW),
+                   raw=LIGHT_RAW,
+                   colorgamuttype=LIGHT_GAMUT_TYPE,
+                   colorgamut=LIGHT_GAMUT),
         request_bridge_update=None,
         bridge=Mock(allow_unreachable=True),
         is_group=False,

@@ -721,7 +725,9 @@ def test_available():

     light = hue_light.HueLight(
         light=Mock(state={'reachable': False},
-                   raw=LIGHT_RAW),
+                   raw=LIGHT_RAW,
+                   colorgamuttype=LIGHT_GAMUT_TYPE,
+                   colorgamut=LIGHT_GAMUT),
         request_bridge_update=None,
         bridge=Mock(allow_unreachable=False),
         is_group=True,

@@ -50,6 +50,27 @@ async def test_lovelace_from_storage(hass, hass_ws_client, hass_storage):
     }


+async def test_lovelace_from_storage_save_before_load(hass, hass_ws_client,
+                                                       hass_storage):
+    """Test we can load lovelace config from storage."""
+    assert await async_setup_component(hass, 'lovelace', {})
+    client = await hass_ws_client(hass)
+
+    # Store new config
+    await client.send_json({
+        'id': 6,
+        'type': 'lovelace/config/save',
+        'config': {
+            'yo': 'hello'
+        }
+    })
+    response = await client.receive_json()
+    assert response['success']
+    assert hass_storage[lovelace.STORAGE_KEY]['data'] == {
+        'config': {'yo': 'hello'}
+    }
+
+
 async def test_lovelace_from_yaml(hass, hass_ws_client):
     """Test we load lovelace config from yaml."""
     assert await async_setup_component(hass, 'lovelace', {

@@ -142,3 +142,12 @@ class TestRecorderRuns(unittest.TestCase):

         assert sorted(run.entity_ids()) == ['sensor.humidity', 'sensor.lux']
         assert run.entity_ids(in_run2) == ['sensor.humidity']
+
+
+def test_states_from_native_invalid_entity_id():
+    """Test loading a state from an invalid entity ID."""
+    event = States()
+    event.entity_id = "test.invalid__id"
+    event.attributes = "{}"
+    state = event.to_native()
+    assert state.entity_id == 'test.invalid__id'

@@ -602,3 +602,31 @@ def test_comp_entity_ids():
     for invalid in (['light.kitchen', 'not-entity-id'], '*', ''):
         with pytest.raises(vol.Invalid):
             schema(invalid)
+
+
+def test_schema_with_slug_keys_allows_old_slugs(caplog):
+    """Test schema with slug keys allowing old slugs."""
+    schema = cv.schema_with_slug_keys(str)
+
+    with patch.dict(cv.INVALID_SLUGS_FOUND, clear=True):
+        for value in ('_world', 'wow__yeah'):
+            caplog.clear()
+            # Will raise if not allowing old slugs
+            schema({value: 'yo'})
+            assert "Found invalid slug {}".format(value) in caplog.text
+
+        assert len(cv.INVALID_SLUGS_FOUND) == 2
+
+
+def test_entity_id_allow_old_validation(caplog):
+    """Test schema allowing old entity_ids."""
+    schema = vol.Schema(cv.entity_id)
+
+    with patch.dict(cv.INVALID_ENTITY_IDS_FOUND, clear=True):
+        for value in ('hello.__world', 'great.wow__yeah'):
+            caplog.clear()
+            # Will raise if not allowing old entity ID
+            schema(value)
+            assert "Found invalid entity_id {}".format(value) in caplog.text
+
+        assert len(cv.INVALID_ENTITY_IDS_FOUND) == 2

@@ -4,6 +4,7 @@ from unittest.mock import patch

 import pytest

+from homeassistant.core import valid_entity_id
 from homeassistant.helpers import entity_registry

 from tests.common import mock_registry, flush_store

@@ -222,3 +223,36 @@ async def test_migration(hass):
     assert entry.name == 'Test Name'
     assert entry.disabled_by == 'hass'
     assert entry.config_entry_id == 'test-config-id'
+
+
+async def test_loading_invalid_entity_id(hass, hass_storage):
+    """Test we autofix invalid entity IDs."""
+    hass_storage[entity_registry.STORAGE_KEY] = {
+        'version': entity_registry.STORAGE_VERSION,
+        'data': {
+            'entities': [
+                {
+                    'entity_id': 'test.invalid__middle',
+                    'platform': 'super_platform',
+                    'unique_id': 'id-invalid-middle',
+                    'name': 'registry override',
+                }, {
+                    'entity_id': 'test.invalid_end_',
+                    'platform': 'super_platform',
+                    'unique_id': 'id-invalid-end',
+                }
+            ]
+        }
+    }
+
+    registry = await entity_registry.async_get_registry(hass)
+
+    entity_invalid_middle = registry.async_get_or_create(
+        'test', 'super_platform', 'id-invalid-middle')
+
+    assert valid_entity_id(entity_invalid_middle.entity_id)
+
+    entity_invalid_end = registry.async_get_or_create(
+        'test', 'super_platform', 'id-invalid-end')
+
+    assert valid_entity_id(entity_invalid_end.entity_id)

@@ -6,7 +6,8 @@ from homeassistant.core import CoreState, State
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.restore_state import (
-    RestoreStateData, RestoreEntity, StoredState, DATA_RESTORE_STATE_TASK)
+    RestoreStateData, RestoreEntity, StoredState, DATA_RESTORE_STATE_TASK,
+    STORAGE_KEY)
 from homeassistant.util import dt as dt_util

 from asynctest import patch

@@ -218,3 +219,34 @@ async def test_state_saved_on_remove(hass):

     # We should store the input boolean state when it is removed
     assert data.last_states['input_boolean.b0'].state.state == 'on'
+
+
+async def test_restoring_invalid_entity_id(hass, hass_storage):
+    """Test restoring invalid entity IDs."""
+    entity = RestoreEntity()
+    entity.hass = hass
+    entity.entity_id = 'test.invalid__entity_id'
+    now = dt_util.utcnow().isoformat()
+    hass_storage[STORAGE_KEY] = {
+        'version': 1,
+        'key': STORAGE_KEY,
+        'data': [
+            {
+                'state': {
+                    'entity_id': 'test.invalid__entity_id',
+                    'state': 'off',
+                    'attributes': {},
+                    'last_changed': now,
+                    'last_updated': now,
+                    'context': {
+                        'id': '3c2243ff5f30447eb12e7348cfd5b8ff',
+                        'user_id': None
+                    }
+                },
+                'last_seen': dt_util.utcnow().isoformat()
+            }
+        ]
+    }
+
+    state = await entity.async_get_last_state()
+    assert state is None

@@ -8,6 +8,18 @@ import voluptuous as vol
 GAMUT = color_util.GamutType(color_util.XYPoint(0.704, 0.296),
                              color_util.XYPoint(0.2151, 0.7106),
                              color_util.XYPoint(0.138, 0.08))
+GAMUT_INVALID_1 = color_util.GamutType(color_util.XYPoint(0.704, 0.296),
+                                       color_util.XYPoint(-0.201, 0.7106),
+                                       color_util.XYPoint(0.138, 0.08))
+GAMUT_INVALID_2 = color_util.GamutType(color_util.XYPoint(0.704, 1.296),
+                                       color_util.XYPoint(0.2151, 0.7106),
+                                       color_util.XYPoint(0.138, 0.08))
+GAMUT_INVALID_3 = color_util.GamutType(color_util.XYPoint(0.0, 0.0),
+                                       color_util.XYPoint(0.0, 0.0),
+                                       color_util.XYPoint(0.0, 0.0))
+GAMUT_INVALID_4 = color_util.GamutType(color_util.XYPoint(0.1, 0.1),
+                                       color_util.XYPoint(0.3, 0.3),
+                                       color_util.XYPoint(0.7, 0.7))


 class TestColorUtil(unittest.TestCase):

@@ -338,6 +350,14 @@ class TestColorUtil(unittest.TestCase):
         assert color_util.color_rgb_to_hex(51, 153, 255) == '3399ff'
         assert color_util.color_rgb_to_hex(255, 67.9204190, 0) == 'ff4400'

+    def test_gamut(self):
+        """Test gamut functions."""
+        assert color_util.check_valid_gamut(GAMUT)
+        assert not color_util.check_valid_gamut(GAMUT_INVALID_1)
+        assert not color_util.check_valid_gamut(GAMUT_INVALID_2)
+        assert not color_util.check_valid_gamut(GAMUT_INVALID_3)
+        assert not color_util.check_valid_gamut(GAMUT_INVALID_4)


 class ColorTemperatureMiredToKelvinTests(unittest.TestCase):
     """Test color_temperature_mired_to_kelvin."""