Compare commits

...

34 Commits

Author SHA1 Message Date
Franck Nijhof ae4fc9504a 2024.8.1 (#123544) 2024-08-10 19:32:02 +02:00
Franck Nijhof 2ef337ec2e Bump version to 2024.8.1 2024-08-10 18:41:57 +02:00
cnico 723b7bd532 Upgrade chacon_dio_api to version 1.2.0 (#123528)
Upgrade api version 1.2.0 with the first user feedback improvement
2024-08-10 18:41:39 +02:00
Joost Lekkerkerker 4fdb11b0d8 Bump AirGradient to 0.8.0 (#123527) 2024-08-10 18:41:36 +02:00
Matt Way fe2e6c37f4 Bump pydaikin to 2.13.2 (#123519) 2024-08-10 18:41:32 +02:00
Michael 4a75c55a8f Fix cleanup of old orphan device entries in AVM Fritz!Tools (#123516)
fix cleanup of old orphan device entries
2024-08-10 18:41:29 +02:00
Duco Sebel dfb59469cf Bump python-homewizard-energy to 6.2.0 (#123514) 2024-08-10 18:41:26 +02:00
David F. Mulcahey bdb2e1e2e9 Bump zha lib to 0.0.30 (#123499) 2024-08-10 18:41:22 +02:00
Franck Nijhof c4f6f1e3d8 Update frontend to 20240809.0 (#123485) 2024-08-10 18:41:19 +02:00
Louis Christ fb3eae54ea Fix startup blocked by bluesound integration (#123483) 2024-08-10 18:41:16 +02:00
Jake Martin d3f8fce788 Bump monzopy to 1.3.2 (#123480) 2024-08-10 18:41:13 +02:00
Steve Easley 44e58a8c87 Bump pyjvcprojector to 1.0.12 to fix blocking call (#123473) 2024-08-10 18:41:09 +02:00
puddly 3d3879b0db Bump ZHA library to 0.0.29 (#123464)
* Bump zha to 0.0.29

* Pass the Core timezone to ZHA

* Add a unit test
2024-08-10 18:41:06 +02:00
Franck Nijhof a8b1eb34f3 Support action YAML syntax in old-style notify groups (#123457) 2024-08-10 18:41:03 +02:00
Matrix fd77058def Bump YoLink API to 0.4.7 (#123441) 2024-08-10 18:41:00 +02:00
Brett Adams b147ca6c5b Add missing logger to Tessie (#123413) 2024-08-10 18:40:57 +02:00
dupondje 670c4cacfa Also migrate dsmr entries for devices with correct serial (#123407)
dsmr: also migrate entries for devices with correct serial

When the dsmr code could not find the serial_nr for the gas meter,
it creates the gas meter device with the entry_id as identifier.

But when there is a correct serial_nr, it will use that as identifier
for the dsmr gas device.

Now the migration code did not take this into account, so migration to
the new name failed since it didn't look for the device with correct
serial_nr.

This commit fixes this and adds a test for this.
2024-08-10 18:40:53 +02:00
J. Nick Koston 1ed0a89303 Bump aiohttp to 3.10.2 (#123394) 2024-08-10 18:40:50 +02:00
J. Nick Koston ab0597da7b Ensure legacy event foreign key is removed from the states table when a previous rebuild failed (#123388)
* Ensure legacy event foreign key is removed from the states table

If the system ran out of disk space removing the FK, it would
fail. #121938 fixed that to try again; however, that PR was made
ineffective by #122069, since it will never reach the check.

To solve this, the migration version is incremented to 2, and
the migration is no longer marked as done unless the rebuild
/fk removal is successful.

* fix logic for mysql

* fix test

* asserts

* coverage

* coverage

* narrow test

* fixes

* split tests

* should have skipped

* fixture must be used
2024-08-10 18:40:47 +02:00
Erik Montnemery a3db6bc8fa Revert "Fix blocking I/O while validating config schema" (#123377) 2024-08-10 18:40:44 +02:00
Noah Husby 9bfc8f6e27 Bump aiorussound to 2.2.2 (#123319) 2024-08-10 18:40:41 +02:00
J. Nick Koston 6fddef2dc5 Fix doorbird with externally added events (#123313) 2024-08-10 18:40:38 +02:00
fustom ec08a85aa0 Fix limit and order property for transmission integration (#123305) 2024-08-10 18:40:35 +02:00
Evgeny de7af575c5 Bump OpenWeatherMap to 0.1.1 (#120178)
* add owm modes

* fix tests

* fix modes

* remove sensors

* Update homeassistant/components/openweathermap/sensor.py

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>

---------

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-08-10 18:40:32 +02:00
Tom Brien d3831bae4e Add support for v3 Coinbase API (#116345)
* Add support for v3 Coinbase API

* Add deps

* Move tests
2024-08-10 18:40:28 +02:00
Franck Nijhof 86722ba05e 2024.8.0 (#123276) 2024-08-07 20:20:43 +02:00
Franck Nijhof be4810731a Bump version to 2024.8.0 2024-08-07 19:04:33 +02:00
Franck Nijhof ac6abb363c Bump version to 2024.8.0b9 2024-08-07 18:24:15 +02:00
Michael Hansen 5367886732 Bump intents to 2024.8.7 (#123295) 2024-08-07 18:24:08 +02:00
Stefan Agner 7a51d4ff62 Drop Matter Microwave Oven Mode select entity (#123294) 2024-08-07 18:24:05 +02:00
ashalita ef564c537d Revert "Upgrade pycoolmasternet-async to 0.2.0" (#123286) 2024-08-07 18:24:02 +02:00
Franck Nijhof 082290b092 Bump version to 2024.8.0b8 2024-08-07 13:15:23 +02:00
Franck Nijhof 4a212791a2 Update wled to 0.20.1 (#123283) 2024-08-07 13:15:12 +02:00
Brett Adams 6bb55ce79e Add missing application credential to Tesla Fleet (#123271)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
2024-08-07 13:15:04 +02:00
63 changed files with 1190 additions and 356 deletions
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["airgradient==0.7.1"],
"requirements": ["airgradient==0.8.0"],
"zeroconf": ["_airgradient._tcp.local."]
}
@@ -317,21 +317,24 @@ class BluesoundPlayer(MediaPlayerEntity):
await self.async_update_status()
except (TimeoutError, ClientError):
_LOGGER.error("Node %s:%s is offline, retrying later", self.name, self.port)
_LOGGER.error("Node %s:%s is offline, retrying later", self.host, self.port)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
self.start_polling()
except CancelledError:
_LOGGER.debug("Stopping the polling of node %s:%s", self.name, self.port)
_LOGGER.debug("Stopping the polling of node %s:%s", self.host, self.port)
except Exception:
_LOGGER.exception("Unexpected error in %s:%s", self.name, self.port)
_LOGGER.exception("Unexpected error in %s:%s", self.host, self.port)
raise
async def async_added_to_hass(self) -> None:
"""Start the polling task."""
await super().async_added_to_hass()
self._polling_task = self.hass.async_create_task(self._start_poll_command())
self._polling_task = self.hass.async_create_background_task(
self._start_poll_command(),
name=f"bluesound.polling_{self.host}:{self.port}",
)
async def async_will_remove_from_hass(self) -> None:
"""Stop the polling task."""
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/chacon_dio",
"iot_class": "cloud_push",
"loggers": ["dio_chacon_api"],
"requirements": ["dio-chacon-wifi-api==1.1.0"]
"requirements": ["dio-chacon-wifi-api==1.2.0"]
}
+84 -19
View File
@@ -5,7 +5,9 @@ from __future__ import annotations
from datetime import timedelta
import logging
from coinbase.wallet.client import Client
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
from homeassistant.config_entries import ConfigEntry
@@ -15,8 +17,23 @@ from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.util import Throttle
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_AMOUNT,
API_ACCOUNT_AVALIABLE,
API_ACCOUNT_BALANCE,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_ACCOUNT_HOLD,
API_ACCOUNT_ID,
API_ACCOUNTS_DATA,
API_ACCOUNT_NAME,
API_ACCOUNT_VALUE,
API_ACCOUNTS,
API_DATA,
API_RATES_CURRENCY,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
API_V3_ACCOUNT_ID,
API_V3_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_BASE,
CONF_EXCHANGE_RATES,
@@ -59,9 +76,16 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
def create_and_update_instance(entry: ConfigEntry) -> CoinbaseData:
"""Create and update a Coinbase Data instance."""
client = Client(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
if "organizations" not in entry.data[CONF_API_KEY]:
client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
version = "v2"
else:
client = RESTClient(
api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
)
version = "v3"
base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD")
instance = CoinbaseData(client, base_rate)
instance = CoinbaseData(client, base_rate, version)
instance.update()
return instance
@@ -86,42 +110,83 @@ async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> Non
registry.async_remove(entity.entity_id)
def get_accounts(client):
def get_accounts(client, version):
"""Handle paginated accounts."""
response = client.get_accounts()
accounts = response[API_ACCOUNTS_DATA]
next_starting_after = response.pagination.next_starting_after
while next_starting_after:
response = client.get_accounts(starting_after=next_starting_after)
accounts += response[API_ACCOUNTS_DATA]
if version == "v2":
accounts = response[API_DATA]
next_starting_after = response.pagination.next_starting_after
return accounts
while next_starting_after:
response = client.get_accounts(starting_after=next_starting_after)
accounts += response[API_DATA]
next_starting_after = response.pagination.next_starting_after
return [
{
API_ACCOUNT_ID: account[API_ACCOUNT_ID],
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][
API_ACCOUNT_CURRENCY_CODE
],
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT],
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT,
}
for account in accounts
]
accounts = response[API_ACCOUNTS]
while response["has_next"]:
response = client.get_accounts(cursor=response["cursor"])
accounts += response["accounts"]
return [
{
API_ACCOUNT_ID: account[API_V3_ACCOUNT_ID],
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY],
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE]
+ account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE],
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_V3_TYPE_VAULT,
}
for account in accounts
]
class CoinbaseData:
"""Get the latest data and update the states."""
def __init__(self, client, exchange_base):
def __init__(self, client, exchange_base, version):
"""Init the coinbase data object."""
self.client = client
self.accounts = None
self.exchange_base = exchange_base
self.exchange_rates = None
self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
if version == "v2":
self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
else:
self.user_id = (
"v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
)
self.api_version = version
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from coinbase."""
try:
self.accounts = get_accounts(self.client)
self.exchange_rates = self.client.get_exchange_rates(
currency=self.exchange_base
)
except AuthenticationError as coinbase_error:
self.accounts = get_accounts(self.client, self.api_version)
if self.api_version == "v2":
self.exchange_rates = self.client.get_exchange_rates(
currency=self.exchange_base
)
else:
self.exchange_rates = self.client.get(
"/v2/exchange-rates",
params={API_RATES_CURRENCY: self.exchange_base},
)[API_DATA]
except (AuthenticationError, HTTPError) as coinbase_error:
_LOGGER.error(
"Authentication error connecting to coinbase: %s", coinbase_error
)
@@ -5,7 +5,9 @@ from __future__ import annotations
import logging
from typing import Any
from coinbase.wallet.client import Client
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
import voluptuous as vol
@@ -15,18 +17,17 @@ from homeassistant.config_entries import (
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from . import get_accounts
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_DATA,
API_RATES,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_BASE,
CONF_EXCHANGE_PRECISION,
@@ -49,8 +50,11 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
def get_user_from_client(api_key, api_token):
"""Get the user name from Coinbase API credentials."""
client = Client(api_key, api_token)
return client.get_current_user()
if "organizations" not in api_key:
client = LegacyClient(api_key, api_token)
return client.get_current_user()["name"]
client = RESTClient(api_key=api_key, api_secret=api_token)
return client.get_portfolios()["portfolios"][0]["name"]
async def validate_api(hass: HomeAssistant, data):
@@ -60,11 +64,13 @@ async def validate_api(hass: HomeAssistant, data):
user = await hass.async_add_executor_job(
get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN]
)
except AuthenticationError as error:
if "api key" in str(error):
except (AuthenticationError, HTTPError) as error:
if "api key" in str(error) or " 401 Client Error" in str(error):
_LOGGER.debug("Coinbase rejected API credentials due to an invalid API key")
raise InvalidKey from error
if "invalid signature" in str(error):
if "invalid signature" in str(
error
) or "'Could not deserialize key data" in str(error):
_LOGGER.debug(
"Coinbase rejected API credentials due to an invalid API secret"
)
@@ -73,8 +79,8 @@ async def validate_api(hass: HomeAssistant, data):
raise InvalidAuth from error
except ConnectionError as error:
raise CannotConnect from error
return {"title": user["name"]}
api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2"
return {"title": user, "api_version": api_version}
async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, options):
@@ -82,14 +88,20 @@ async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, optio
client = hass.data[DOMAIN][config_entry.entry_id].client
accounts = await hass.async_add_executor_job(get_accounts, client)
accounts = await hass.async_add_executor_job(
get_accounts, client, config_entry.data.get("api_version", "v2")
)
accounts_currencies = [
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
account[API_ACCOUNT_CURRENCY]
for account in accounts
if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
if not account[ACCOUNT_IS_VAULT]
]
available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
if config_entry.data.get("api_version", "v2") == "v2":
available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
else:
resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
available_rates = resp[API_DATA]
if CONF_CURRENCIES in options:
for currency in options[CONF_CURRENCIES]:
if currency not in accounts_currencies:
@@ -134,6 +146,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
user_input[CONF_API_VERSION] = info["api_version"]
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
+10 -1
View File
@@ -1,5 +1,7 @@
"""Constants used for Coinbase."""
ACCOUNT_IS_VAULT = "is_vault"
CONF_CURRENCIES = "account_balance_currencies"
CONF_EXCHANGE_BASE = "exchange_base"
CONF_EXCHANGE_RATES = "exchange_rate_currencies"
@@ -10,18 +12,25 @@ DOMAIN = "coinbase"
# Constants for data returned by Coinbase API
API_ACCOUNT_AMOUNT = "amount"
API_ACCOUNT_AVALIABLE = "available_balance"
API_ACCOUNT_BALANCE = "balance"
API_ACCOUNT_CURRENCY = "currency"
API_ACCOUNT_CURRENCY_CODE = "code"
API_ACCOUNT_HOLD = "hold"
API_ACCOUNT_ID = "id"
API_ACCOUNT_NATIVE_BALANCE = "balance"
API_ACCOUNT_NAME = "name"
API_ACCOUNTS_DATA = "data"
API_ACCOUNT_VALUE = "value"
API_ACCOUNTS = "accounts"
API_DATA = "data"
API_RATES = "rates"
API_RATES_CURRENCY = "currency"
API_RESOURCE_PATH = "resource_path"
API_RESOURCE_TYPE = "type"
API_TYPE_VAULT = "vault"
API_USD = "USD"
API_V3_ACCOUNT_ID = "uuid"
API_V3_TYPE_VAULT = "ACCOUNT_TYPE_VAULT"
WALLETS = {
"1INCH": "1INCH",
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/coinbase",
"iot_class": "cloud_polling",
"loggers": ["coinbase"],
"requirements": ["coinbase==2.1.0"]
"requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"]
}
+41 -28
View File
@@ -12,15 +12,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import CoinbaseData
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_AMOUNT,
API_ACCOUNT_BALANCE,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_ACCOUNT_ID,
API_ACCOUNT_NAME,
API_RATES,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_PRECISION,
CONF_EXCHANGE_PRECISION_DEFAULT,
@@ -31,6 +28,7 @@ from .const import (
_LOGGER = logging.getLogger(__name__)
ATTR_NATIVE_BALANCE = "Balance in native currency"
ATTR_API_VERSION = "API Version"
CURRENCY_ICONS = {
"BTC": "mdi:currency-btc",
@@ -56,9 +54,9 @@ async def async_setup_entry(
entities: list[SensorEntity] = []
provided_currencies: list[str] = [
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
account[API_ACCOUNT_CURRENCY]
for account in instance.accounts
if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
if not account[ACCOUNT_IS_VAULT]
]
desired_currencies: list[str] = []
@@ -73,6 +71,11 @@ async def async_setup_entry(
)
for currency in desired_currencies:
_LOGGER.debug(
"Attempting to set up %s account sensor with %s API",
currency,
instance.api_version,
)
if currency not in provided_currencies:
_LOGGER.warning(
(
@@ -85,12 +88,17 @@ async def async_setup_entry(
entities.append(AccountSensor(instance, currency))
if CONF_EXCHANGE_RATES in config_entry.options:
entities.extend(
ExchangeRateSensor(
instance, rate, exchange_base_currency, exchange_precision
for rate in config_entry.options[CONF_EXCHANGE_RATES]:
_LOGGER.debug(
"Attempting to set up %s account sensor with %s API",
rate,
instance.api_version,
)
entities.append(
ExchangeRateSensor(
instance, rate, exchange_base_currency, exchange_precision
)
)
for rate in config_entry.options[CONF_EXCHANGE_RATES]
)
async_add_entities(entities)
@@ -105,26 +113,21 @@ class AccountSensor(SensorEntity):
self._coinbase_data = coinbase_data
self._currency = currency
for account in coinbase_data.accounts:
if (
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] != currency
or account[API_RESOURCE_TYPE] == API_TYPE_VAULT
):
if account[API_ACCOUNT_CURRENCY] != currency or account[ACCOUNT_IS_VAULT]:
continue
self._attr_name = f"Coinbase {account[API_ACCOUNT_NAME]}"
self._attr_unique_id = (
f"coinbase-{account[API_ACCOUNT_ID]}-wallet-"
f"{account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]}"
f"{account[API_ACCOUNT_CURRENCY]}"
)
self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]
self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY][
API_ACCOUNT_CURRENCY_CODE
]
self._attr_native_value = account[API_ACCOUNT_AMOUNT]
self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY]
self._attr_icon = CURRENCY_ICONS.get(
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE],
account[API_ACCOUNT_CURRENCY],
DEFAULT_COIN_ICON,
)
self._native_balance = round(
float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT])
float(account[API_ACCOUNT_AMOUNT])
/ float(coinbase_data.exchange_rates[API_RATES][currency]),
2,
)
@@ -144,21 +147,26 @@ class AccountSensor(SensorEntity):
"""Return the state attributes of the sensor."""
return {
ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
ATTR_API_VERSION: self._coinbase_data.api_version,
}
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s account sensor with %s API",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
for account in self._coinbase_data.accounts:
if (
account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
!= self._currency
or account[API_RESOURCE_TYPE] == API_TYPE_VAULT
account[API_ACCOUNT_CURRENCY] != self._currency
or account[ACCOUNT_IS_VAULT]
):
continue
self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]
self._attr_native_value = account[API_ACCOUNT_AMOUNT]
self._native_balance = round(
float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT])
float(account[API_ACCOUNT_AMOUNT])
/ float(self._coinbase_data.exchange_rates[API_RATES][self._currency]),
2,
)
@@ -202,8 +210,13 @@ class ExchangeRateSensor(SensorEntity):
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s rate sensor with %s API",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
self._attr_native_value = round(
1 / float(self._coinbase_data.exchange_rates.rates[self._currency]),
1 / float(self._coinbase_data.exchange_rates[API_RATES][self._currency]),
self._precision,
)
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.7.29"]
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.7"]
}
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/coolmaster",
"iot_class": "local_polling",
"loggers": ["pycoolmasternet_async"],
"requirements": ["pycoolmasternet-async==0.2.0"]
"requirements": ["pycoolmasternet-async==0.1.5"]
}
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/daikin",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.13.1"],
"requirements": ["pydaikin==2.13.2"],
"zeroconf": ["_dkapi._tcp.local."]
}
+1 -1
View File
@@ -195,7 +195,7 @@ class ConfiguredDoorBird:
title: str | None = data.get("title")
if not title or not title.startswith("Home Assistant"):
continue
event = title.split("(")[1].strip(")")
event = title.partition("(")[2].strip(")")
if input_type := favorite_input_type.get(identifier):
events.append(DoorbirdEvent(event, input_type))
elif input_type := default_event_types.get(event):
+34 -33
View File
@@ -431,41 +431,42 @@ def rename_old_gas_to_mbus(
) -> None:
"""Rename old gas sensor to mbus variant."""
dev_reg = dr.async_get(hass)
device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, entry.entry_id)})
if device_entry_v1 is not None:
device_id = device_entry_v1.id
for dev_id in (mbus_device_id, entry.entry_id):
device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, dev_id)})
if device_entry_v1 is not None:
device_id = device_entry_v1.id
ent_reg = er.async_get(hass)
entries = er.async_entries_for_device(ent_reg, device_id)
ent_reg = er.async_get(hass)
entries = er.async_entries_for_device(ent_reg, device_id)
for entity in entries:
if entity.unique_id.endswith(
"belgium_5min_gas_meter_reading"
) or entity.unique_id.endswith("hourly_gas_meter_reading"):
try:
ent_reg.async_update_entity(
entity.entity_id,
new_unique_id=mbus_device_id,
device_id=mbus_device_id,
)
except ValueError:
LOGGER.debug(
"Skip migration of %s because it already exists",
entity.entity_id,
)
else:
LOGGER.debug(
"Migrated entity %s from unique id %s to %s",
entity.entity_id,
entity.unique_id,
mbus_device_id,
)
# Cleanup old device
dev_entities = er.async_entries_for_device(
ent_reg, device_id, include_disabled_entities=True
)
if not dev_entities:
dev_reg.async_remove_device(device_id)
for entity in entries:
if entity.unique_id.endswith(
"belgium_5min_gas_meter_reading"
) or entity.unique_id.endswith("hourly_gas_meter_reading"):
try:
ent_reg.async_update_entity(
entity.entity_id,
new_unique_id=mbus_device_id,
device_id=mbus_device_id,
)
except ValueError:
LOGGER.debug(
"Skip migration of %s because it already exists",
entity.entity_id,
)
else:
LOGGER.debug(
"Migrated entity %s from unique id %s to %s",
entity.entity_id,
entity.unique_id,
mbus_device_id,
)
# Cleanup old device
dev_entities = er.async_entries_for_device(
ent_reg, device_id, include_disabled_entities=True
)
if not dev_entities:
dev_reg.async_remove_device(device_id)
def is_supported_description(
@@ -653,8 +653,6 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
entities: list[er.RegistryEntry] = er.async_entries_for_config_entry(
entity_reg, config_entry.entry_id
)
orphan_macs: set[str] = set()
for entity in entities:
entry_mac = entity.unique_id.split("_")[0]
if (
@@ -662,17 +660,16 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
or "_internet_access" in entity.unique_id
) and entry_mac not in device_hosts:
_LOGGER.info("Removing orphan entity entry %s", entity.entity_id)
orphan_macs.add(entry_mac)
entity_reg.async_remove(entity.entity_id)
device_reg = dr.async_get(self.hass)
orphan_connections = {
(CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in orphan_macs
valid_connections = {
(CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in device_hosts
}
for device in dr.async_entries_for_config_entry(
device_reg, config_entry.entry_id
):
if any(con in device.connections for con in orphan_connections):
if not any(con in device.connections for con in valid_connections):
_LOGGER.debug("Removing obsolete device entry %s", device.name)
device_reg.async_update_device(
device.id, remove_config_entry_id=config_entry.entry_id
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20240806.1"]
"requirements": ["home-assistant-frontend==20240809.0"]
}
+30 -3
View File
@@ -22,8 +22,9 @@ from homeassistant.components.notify import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SERVICE,
CONF_ACTION,
CONF_ENTITIES,
CONF_SERVICE,
STATE_UNAVAILABLE,
)
from homeassistant.core import HomeAssistant, callback
@@ -36,11 +37,37 @@ from .entity import GroupEntity
CONF_SERVICES = "services"
def _backward_compat_schema(value: Any | None) -> Any:
"""Backward compatibility for notify service schemas."""
if not isinstance(value, dict):
return value
# `service` has been renamed to `action`
if CONF_SERVICE in value:
if CONF_ACTION in value:
raise vol.Invalid(
"Cannot specify both 'service' and 'action'. Please use 'action' only."
)
value[CONF_ACTION] = value.pop(CONF_SERVICE)
return value
PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SERVICES): vol.All(
cv.ensure_list,
[{vol.Required(ATTR_SERVICE): cv.slug, vol.Optional(ATTR_DATA): dict}],
[
vol.All(
_backward_compat_schema,
{
vol.Required(CONF_ACTION): cv.slug,
vol.Optional(ATTR_DATA): dict,
},
)
],
)
}
)
@@ -88,7 +115,7 @@ class GroupNotifyPlatform(BaseNotificationService):
tasks.append(
asyncio.create_task(
self.hass.services.async_call(
DOMAIN, entity[ATTR_SERVICE], sending_payload, blocking=True
DOMAIN, entity[CONF_ACTION], sending_payload, blocking=True
)
)
)
@@ -7,6 +7,6 @@
"iot_class": "local_polling",
"loggers": ["homewizard_energy"],
"quality_scale": "platinum",
"requirements": ["python-homewizard-energy==v6.1.1"],
"requirements": ["python-homewizard-energy==v6.2.0"],
"zeroconf": ["_hwenergy._tcp.local."]
}
@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["jvcprojector"],
"requirements": ["pyjvcprojector==1.0.11"]
"requirements": ["pyjvcprojector==1.0.12"]
}
-13
View File
@@ -27,7 +27,6 @@ type SelectCluster = (
| clusters.RvcRunMode
| clusters.RvcCleanMode
| clusters.DishwasherMode
| clusters.MicrowaveOvenMode
| clusters.EnergyEvseMode
| clusters.DeviceEnergyManagementMode
)
@@ -199,18 +198,6 @@ DISCOVERY_SCHEMAS = [
clusters.DishwasherMode.Attributes.SupportedModes,
),
),
MatterDiscoverySchema(
platform=Platform.SELECT,
entity_description=MatterSelectEntityDescription(
key="MatterMicrowaveOvenMode",
translation_key="mode",
),
entity_class=MatterModeSelectEntity,
required_attributes=(
clusters.MicrowaveOvenMode.Attributes.CurrentMode,
clusters.MicrowaveOvenMode.Attributes.SupportedModes,
),
),
MatterDiscoverySchema(
platform=Platform.SELECT,
entity_description=MatterSelectEntityDescription(
+1 -1
View File
@@ -6,5 +6,5 @@
"dependencies": ["application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/monzo",
"iot_class": "cloud_polling",
"requirements": ["monzopy==1.3.0"]
"requirements": ["monzopy==1.3.2"]
}
@@ -5,7 +5,7 @@ from __future__ import annotations
from dataclasses import dataclass
import logging
from pyopenweathermap import OWMClient
from pyopenweathermap import create_owm_client
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -33,6 +33,7 @@ class OpenweathermapData:
"""Runtime data definition."""
name: str
mode: str
coordinator: WeatherUpdateCoordinator
@@ -52,7 +53,7 @@ async def async_setup_entry(
else:
async_delete_issue(hass, entry.entry_id)
owm_client = OWMClient(api_key, mode, lang=language)
owm_client = create_owm_client(api_key, mode, lang=language)
weather_coordinator = WeatherUpdateCoordinator(
owm_client, latitude, longitude, hass
)
@@ -61,7 +62,7 @@ async def async_setup_entry(
entry.async_on_unload(entry.add_update_listener(async_update_options))
entry.runtime_data = OpenweathermapData(name, weather_coordinator)
entry.runtime_data = OpenweathermapData(name, mode, weather_coordinator)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -58,10 +58,17 @@ FORECAST_MODE_DAILY = "daily"
FORECAST_MODE_FREE_DAILY = "freedaily"
FORECAST_MODE_ONECALL_HOURLY = "onecall_hourly"
FORECAST_MODE_ONECALL_DAILY = "onecall_daily"
OWM_MODE_V25 = "v2.5"
OWM_MODE_FREE_CURRENT = "current"
OWM_MODE_FREE_FORECAST = "forecast"
OWM_MODE_V30 = "v3.0"
OWM_MODES = [OWM_MODE_V30, OWM_MODE_V25]
DEFAULT_OWM_MODE = OWM_MODE_V30
OWM_MODE_V25 = "v2.5"
OWM_MODES = [
OWM_MODE_FREE_CURRENT,
OWM_MODE_FREE_FORECAST,
OWM_MODE_V30,
OWM_MODE_V25,
]
DEFAULT_OWM_MODE = OWM_MODE_FREE_CURRENT
LANGUAGES = [
"af",
@@ -86,8 +86,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
"""Format the weather response correctly."""
_LOGGER.debug("OWM weather response: %s", weather_report)
current_weather = (
self._get_current_weather_data(weather_report.current)
if weather_report.current is not None
else {}
)
return {
ATTR_API_CURRENT: self._get_current_weather_data(weather_report.current),
ATTR_API_CURRENT: current_weather,
ATTR_API_HOURLY_FORECAST: [
self._get_hourly_forecast_weather_data(item)
for item in weather_report.hourly_forecast
@@ -122,6 +128,8 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
}
def _get_hourly_forecast_weather_data(self, forecast: HourlyWeatherForecast):
uv_index = float(forecast.uv_index) if forecast.uv_index is not None else None
return Forecast(
datetime=forecast.date_time.isoformat(),
condition=self._get_condition(forecast.condition.id),
@@ -134,12 +142,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
wind_speed=forecast.wind_speed,
native_wind_gust_speed=forecast.wind_gust,
wind_bearing=forecast.wind_bearing,
uv_index=float(forecast.uv_index),
uv_index=uv_index,
precipitation_probability=round(forecast.precipitation_probability * 100),
precipitation=self._calc_precipitation(forecast.rain, forecast.snow),
)
def _get_daily_forecast_weather_data(self, forecast: DailyWeatherForecast):
uv_index = float(forecast.uv_index) if forecast.uv_index is not None else None
return Forecast(
datetime=forecast.date_time.isoformat(),
condition=self._get_condition(forecast.condition.id),
@@ -153,7 +163,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
wind_speed=forecast.wind_speed,
native_wind_gust_speed=forecast.wind_gust,
wind_bearing=forecast.wind_bearing,
uv_index=float(forecast.uv_index),
uv_index=uv_index,
precipitation_probability=round(forecast.precipitation_probability * 100),
precipitation=round(forecast.rain + forecast.snow, 2),
)
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/openweathermap",
"iot_class": "cloud_polling",
"loggers": ["pyopenweathermap"],
"requirements": ["pyopenweathermap==0.0.9"]
"requirements": ["pyopenweathermap==0.1.1"]
}
@@ -19,6 +19,7 @@ from homeassistant.const import (
UnitOfVolumetricFlux,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
@@ -47,6 +48,7 @@ from .const import (
DEFAULT_NAME,
DOMAIN,
MANUFACTURER,
OWM_MODE_FREE_FORECAST,
)
from .coordinator import WeatherUpdateCoordinator
@@ -161,16 +163,23 @@ async def async_setup_entry(
name = domain_data.name
weather_coordinator = domain_data.coordinator
entities: list[AbstractOpenWeatherMapSensor] = [
OpenWeatherMapSensor(
name,
f"{config_entry.unique_id}-{description.key}",
description,
weather_coordinator,
if domain_data.mode == OWM_MODE_FREE_FORECAST:
entity_registry = er.async_get(hass)
entries = er.async_entries_for_config_entry(
entity_registry, config_entry.entry_id
)
for entry in entries:
entity_registry.async_remove(entry.entity_id)
else:
async_add_entities(
OpenWeatherMapSensor(
name,
f"{config_entry.unique_id}-{description.key}",
description,
weather_coordinator,
)
for description in WEATHER_SENSOR_TYPES
)
for description in WEATHER_SENSOR_TYPES
]
async_add_entities(entities)
class AbstractOpenWeatherMapSensor(SensorEntity):
@@ -2,7 +2,7 @@
from typing import Any
from pyopenweathermap import OWMClient, RequestError
from pyopenweathermap import RequestError, create_owm_client
from homeassistant.const import CONF_LANGUAGE, CONF_MODE
@@ -16,7 +16,7 @@ async def validate_api_key(api_key, mode):
api_key_valid = None
errors, description_placeholders = {}, {}
try:
owm_client = OWMClient(api_key, mode)
owm_client = create_owm_client(api_key, mode)
api_key_valid = await owm_client.validate_key()
except RequestError as error:
errors["base"] = "cannot_connect"
@@ -8,6 +8,7 @@ from homeassistant.components.weather import (
WeatherEntityFeature,
)
from homeassistant.const import (
UnitOfLength,
UnitOfPrecipitationDepth,
UnitOfPressure,
UnitOfSpeed,
@@ -29,6 +30,7 @@ from .const import (
ATTR_API_HUMIDITY,
ATTR_API_PRESSURE,
ATTR_API_TEMPERATURE,
ATTR_API_VISIBILITY_DISTANCE,
ATTR_API_WIND_BEARING,
ATTR_API_WIND_GUST,
ATTR_API_WIND_SPEED,
@@ -36,6 +38,9 @@ from .const import (
DEFAULT_NAME,
DOMAIN,
MANUFACTURER,
OWM_MODE_FREE_FORECAST,
OWM_MODE_V25,
OWM_MODE_V30,
)
from .coordinator import WeatherUpdateCoordinator
@@ -48,10 +53,11 @@ async def async_setup_entry(
"""Set up OpenWeatherMap weather entity based on a config entry."""
domain_data = config_entry.runtime_data
name = domain_data.name
mode = domain_data.mode
weather_coordinator = domain_data.coordinator
unique_id = f"{config_entry.unique_id}"
owm_weather = OpenWeatherMapWeather(name, unique_id, weather_coordinator)
owm_weather = OpenWeatherMapWeather(name, unique_id, mode, weather_coordinator)
async_add_entities([owm_weather], False)
@@ -66,11 +72,13 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina
_attr_native_pressure_unit = UnitOfPressure.HPA
_attr_native_temperature_unit = UnitOfTemperature.CELSIUS
_attr_native_wind_speed_unit = UnitOfSpeed.METERS_PER_SECOND
_attr_native_visibility_unit = UnitOfLength.METERS
def __init__(
self,
name: str,
unique_id: str,
mode: str,
weather_coordinator: WeatherUpdateCoordinator,
) -> None:
"""Initialize the sensor."""
@@ -83,59 +91,71 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina
manufacturer=MANUFACTURER,
name=DEFAULT_NAME,
)
self._attr_supported_features = (
WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
)
if mode in (OWM_MODE_V30, OWM_MODE_V25):
self._attr_supported_features = (
WeatherEntityFeature.FORECAST_DAILY
| WeatherEntityFeature.FORECAST_HOURLY
)
elif mode == OWM_MODE_FREE_FORECAST:
self._attr_supported_features = WeatherEntityFeature.FORECAST_HOURLY
@property
def condition(self) -> str | None:
"""Return the current condition."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_CONDITION]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_CONDITION)
@property
def cloud_coverage(self) -> float | None:
"""Return the Cloud coverage in %."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_CLOUDS]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_CLOUDS)
@property
def native_apparent_temperature(self) -> float | None:
"""Return the apparent temperature."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_FEELS_LIKE_TEMPERATURE]
return self.coordinator.data[ATTR_API_CURRENT].get(
ATTR_API_FEELS_LIKE_TEMPERATURE
)
@property
def native_temperature(self) -> float | None:
"""Return the temperature."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_TEMPERATURE]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_TEMPERATURE)
@property
def native_pressure(self) -> float | None:
"""Return the pressure."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_PRESSURE]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_PRESSURE)
@property
def humidity(self) -> float | None:
"""Return the humidity."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_HUMIDITY]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_HUMIDITY)
@property
def native_dew_point(self) -> float | None:
"""Return the dew point."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_DEW_POINT]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_DEW_POINT)
@property
def native_wind_gust_speed(self) -> float | None:
"""Return the wind gust speed."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_GUST]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_GUST)
@property
def native_wind_speed(self) -> float | None:
"""Return the wind speed."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_SPEED]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_SPEED)
@property
def wind_bearing(self) -> float | str | None:
"""Return the wind bearing."""
return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_BEARING]
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_BEARING)
@property
def visibility(self) -> float | str | None:
"""Return visibility."""
return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_VISIBILITY_DISTANCE)
@callback
def _async_forecast_daily(self) -> list[Forecast] | None:
+15 -9
View File
@@ -632,7 +632,7 @@ def _update_states_table_with_foreign_key_options(
def _drop_foreign_key_constraints(
session_maker: Callable[[], Session], engine: Engine, table: str, column: str
) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]:
) -> tuple[bool, list[tuple[str, str, ReflectedForeignKeyConstraint]]]:
"""Drop foreign key constraints for a table on specific columns."""
inspector = sqlalchemy.inspect(engine)
dropped_constraints = [
@@ -649,6 +649,7 @@ def _drop_foreign_key_constraints(
if foreign_key["name"] and foreign_key["constrained_columns"] == [column]
]
fk_remove_ok = True
for drop in drops:
with session_scope(session=session_maker()) as session:
try:
@@ -660,8 +661,9 @@ def _drop_foreign_key_constraints(
TABLE_STATES,
column,
)
fk_remove_ok = False
return dropped_constraints
return fk_remove_ok, dropped_constraints
def _restore_foreign_key_constraints(
@@ -1481,7 +1483,7 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44):
for column in columns
for dropped_constraint in _drop_foreign_key_constraints(
self.session_maker, self.engine, table, column
)
)[1]
]
_LOGGER.debug("Dropped foreign key constraints: %s", dropped_constraints)
@@ -1956,14 +1958,15 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool:
if instance.dialect_name == SupportedDialect.SQLITE:
# SQLite does not support dropping foreign key constraints
# so we have to rebuild the table
rebuild_sqlite_table(session_maker, instance.engine, States)
fk_remove_ok = rebuild_sqlite_table(session_maker, instance.engine, States)
else:
_drop_foreign_key_constraints(
fk_remove_ok, _ = _drop_foreign_key_constraints(
session_maker, instance.engine, TABLE_STATES, "event_id"
)
_drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX)
instance.use_legacy_events_index = False
_mark_migration_done(session, EventIDPostMigration)
if fk_remove_ok:
_drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX)
instance.use_legacy_events_index = False
_mark_migration_done(session, EventIDPostMigration)
return True
@@ -2419,6 +2422,7 @@ class EventIDPostMigration(BaseRunTimeMigration):
migration_id = "event_id_post_migration"
task = MigrationTask
migration_version = 2
@staticmethod
def migrate_data(instance: Recorder) -> bool:
@@ -2469,7 +2473,7 @@ def _mark_migration_done(
def rebuild_sqlite_table(
session_maker: Callable[[], Session], engine: Engine, table: type[Base]
) -> None:
) -> bool:
"""Rebuild an SQLite table.
This must only be called after all migrations are complete
@@ -2524,8 +2528,10 @@ def rebuild_sqlite_table(
# Swallow the exception since we do not want to ever raise
# an integrity error as it would cause the database
# to be discarded and recreated from scratch
return False
else:
_LOGGER.warning("Rebuilding SQLite table %s finished", orig_name)
return True
finally:
with session_scope(session=session_maker()) as session:
# Step 12 - Re-enable foreign keys
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/russound_rio",
"iot_class": "local_push",
"loggers": ["aiorussound"],
"requirements": ["aiorussound==2.2.0"]
"requirements": ["aiorussound==2.2.2"]
}
@@ -13,6 +13,7 @@ from tesla_fleet_api.exceptions import (
TeslaFleetError,
)
from homeassistant.components.application_credentials import ClientCredential
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN, Platform
from homeassistant.core import HomeAssistant
@@ -26,7 +27,9 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import DeviceInfo
from .const import DOMAIN, LOGGER, MODELS
from .application_credentials import TeslaOAuth2Implementation
from .config_flow import OAuth2FlowHandler
from .const import CLIENT_ID, DOMAIN, LOGGER, MODELS, NAME
from .coordinator import (
TeslaFleetEnergySiteInfoCoordinator,
TeslaFleetEnergySiteLiveCoordinator,
@@ -51,6 +54,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -
scopes = token["scp"]
region = token["ou_code"].lower()
OAuth2FlowHandler.async_register_implementation(
hass,
TeslaOAuth2Implementation(hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME)),
)
implementation = await async_get_config_entry_implementation(hass, entry)
oauth_session = OAuth2Session(hass, entry, implementation)
refresh_lock = asyncio.Lock()
@@ -5,15 +5,17 @@ import hashlib
import secrets
from typing import Any
from homeassistant.components.application_credentials import ClientCredential
from homeassistant.components.application_credentials import (
AuthImplementation,
AuthorizationServer,
ClientCredential,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow
from .const import DOMAIN, SCOPES
from .const import AUTHORIZE_URL, DOMAIN, SCOPES, TOKEN_URL
CLIENT_ID = "71b813eb-4a2e-483a-b831-4dec5cb9bf0d"
AUTHORIZE_URL = "https://auth.tesla.com/oauth2/v3/authorize"
TOKEN_URL = "https://auth.tesla.com/oauth2/v3/token"
AUTH_SERVER = AuthorizationServer(AUTHORIZE_URL, TOKEN_URL)
async def async_get_auth_implementation(
@@ -23,15 +25,16 @@ async def async_get_auth_implementation(
return TeslaOAuth2Implementation(
hass,
DOMAIN,
credential,
)
class TeslaOAuth2Implementation(config_entry_oauth2_flow.LocalOAuth2Implementation):
class TeslaOAuth2Implementation(AuthImplementation):
"""Tesla Fleet API Open Source Oauth2 implementation."""
_name = "Tesla Fleet API"
def __init__(self, hass: HomeAssistant, domain: str) -> None:
def __init__(
self, hass: HomeAssistant, domain: str, credential: ClientCredential
) -> None:
"""Initialize local auth implementation."""
self.hass = hass
self._domain = domain
@@ -45,10 +48,8 @@ class TeslaOAuth2Implementation(config_entry_oauth2_flow.LocalOAuth2Implementati
super().__init__(
hass,
domain,
CLIENT_ID,
"", # Implementation has no client secret
AUTHORIZE_URL,
TOKEN_URL,
credential,
AUTH_SERVER,
)
@property
@@ -8,10 +8,12 @@ from typing import Any
import jwt
from homeassistant.components.application_credentials import ClientCredential
from homeassistant.config_entries import ConfigEntry, ConfigFlowResult
from homeassistant.helpers import config_entry_oauth2_flow
from .const import DOMAIN, LOGGER
from .application_credentials import TeslaOAuth2Implementation
from .const import CLIENT_ID, DOMAIN, LOGGER, NAME
class OAuth2FlowHandler(
@@ -27,6 +29,19 @@ class OAuth2FlowHandler(
"""Return logger."""
return LOGGER
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow start."""
self.async_register_implementation(
self.hass,
TeslaOAuth2Implementation(
self.hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME)
),
)
return await super().async_step_user()
async def async_oauth_create_entry(
self,
data: dict[str, Any],
@@ -13,6 +13,11 @@ CONF_REFRESH_TOKEN = "refresh_token"
LOGGER = logging.getLogger(__package__)
NAME = "Home Assistant"
CLIENT_ID = "71b813eb-4a2e-483a-b831-4dec5cb9bf0d"
AUTHORIZE_URL = "https://auth.tesla.com/oauth2/v3/authorize"
TOKEN_URL = "https://auth.tesla.com/oauth2/v3/token"
SCOPES = [
Scope.OPENID,
Scope.OFFLINE_ACCESS,
@@ -5,7 +5,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/tessie",
"iot_class": "cloud_polling",
"loggers": ["tessie"],
"loggers": ["tessie", "tesla-fleet-api"],
"quality_scale": "platinum",
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.7.3"]
}
@@ -55,12 +55,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]):
@property
def limit(self) -> int:
"""Return limit."""
return self.config_entry.data.get(CONF_LIMIT, DEFAULT_LIMIT)
return self.config_entry.options.get(CONF_LIMIT, DEFAULT_LIMIT)
@property
def order(self) -> str:
"""Return order."""
return self.config_entry.data.get(CONF_ORDER, DEFAULT_ORDER)
return self.config_entry.options.get(CONF_ORDER, DEFAULT_ORDER)
async def _async_update_data(self) -> SessionStats:
"""Update transmission data."""
+1 -1
View File
@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "platinum",
"requirements": ["wled==0.20.0"],
"requirements": ["wled==0.20.1"],
"zeroconf": ["_wled._tcp.local."]
}
@@ -6,5 +6,5 @@
"dependencies": ["auth", "application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/yolink",
"iot_class": "cloud_push",
"requirements": ["yolink-api==0.4.6"]
"requirements": ["yolink-api==0.4.7"]
}
+17 -2
View File
@@ -2,6 +2,7 @@
import contextlib
import logging
from zoneinfo import ZoneInfo
import voluptuous as vol
from zha.application.const import BAUD_RATES, RadioType
@@ -12,8 +13,13 @@ from zigpy.config import CONF_DATABASE, CONF_DEVICE, CONF_DEVICE_PATH
from zigpy.exceptions import NetworkSettingsInconsistent, TransientConnectionError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_TYPE, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import Event, HomeAssistant
from homeassistant.const import (
CONF_TYPE,
EVENT_CORE_CONFIG_UPDATE,
EVENT_HOMEASSISTANT_STOP,
Platform,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
import homeassistant.helpers.config_validation as cv
@@ -204,6 +210,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown)
)
@callback
def update_config(event: Event) -> None:
"""Handle Core config update."""
zha_gateway.config.local_timezone = ZoneInfo(hass.config.time_zone)
config_entry.async_on_unload(
hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, update_config)
)
await ha_zha_data.gateway_proxy.async_initialize_devices_and_entities()
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES)
+2
View File
@@ -15,6 +15,7 @@ import re
import time
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, ParamSpec, TypeVar, cast
from zoneinfo import ZoneInfo
import voluptuous as vol
from zha.application.const import (
@@ -1273,6 +1274,7 @@ def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData:
quirks_configuration=quirks_config,
device_overrides=overrides_config,
),
local_timezone=ZoneInfo(hass.config.time_zone),
)
+1 -1
View File
@@ -21,7 +21,7 @@
"zha",
"universal_silabs_flasher"
],
"requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.28"],
"requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.30"],
"usb": [
{
"vid": "10C4",
+3 -11
View File
@@ -817,9 +817,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non
This method is a coroutine.
"""
# CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir
# so we need to run it in an executor job.
config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config)
config = CORE_CONFIG_SCHEMA(config)
# Only load auth during startup.
if not hasattr(hass, "auth"):
@@ -1535,15 +1533,9 @@ async def async_process_component_config(
return IntegrationConfigInfo(None, config_exceptions)
# No custom config validator, proceed with schema validation
if config_schema := getattr(component, "CONFIG_SCHEMA", None):
if hasattr(component, "CONFIG_SCHEMA"):
try:
if domain in config:
# cv.isdir, cv.isfile, cv.isdevice are not async
# friendly so we need to run this in executor
schema = await hass.async_add_executor_job(config_schema, config)
else:
schema = config_schema(config)
return IntegrationConfigInfo(schema, [])
return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), [])
except vol.Invalid as exc:
exc_info = ConfigExceptionInfo(
exc,
+1 -1
View File
@@ -24,7 +24,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2024
MINOR_VERSION: Final = 8
PATCH_VERSION: Final = "0b7"
PATCH_VERSION: Final = "1"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
+3 -3
View File
@@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2
aiodiscover==2.1.0
aiodns==3.2.0
aiohttp-fast-zlib==0.1.1
aiohttp==3.10.1
aiohttp==3.10.2
aiohttp_cors==0.7.0
aiozoneinfo==0.2.1
astral==2.2
@@ -31,8 +31,8 @@ habluetooth==3.1.3
hass-nabucasa==0.81.1
hassil==1.7.4
home-assistant-bluetooth==1.12.2
home-assistant-frontend==20240806.1
home-assistant-intents==2024.7.29
home-assistant-frontend==20240809.0
home-assistant-intents==2024.8.7
httpx==0.27.0
ifaddr==0.2.0
Jinja2==3.1.4
+2 -2
View File
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2024.8.0b7"
version = "2024.8.1"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -24,7 +24,7 @@ classifiers = [
requires-python = ">=3.12.0"
dependencies = [
"aiodns==3.2.0",
"aiohttp==3.10.1",
"aiohttp==3.10.2",
"aiohttp_cors==0.7.0",
"aiohttp-fast-zlib==0.1.1",
"aiozoneinfo==0.2.1",
+1 -1
View File
@@ -4,7 +4,7 @@
# Home Assistant Core
aiodns==3.2.0
aiohttp==3.10.1
aiohttp==3.10.2
aiohttp_cors==0.7.0
aiohttp-fast-zlib==0.1.1
aiozoneinfo==0.2.1
+17 -14
View File
@@ -350,7 +350,7 @@ aioridwell==2024.01.0
aioruckus==0.34
# homeassistant.components.russound_rio
aiorussound==2.2.0
aiorussound==2.2.2
# homeassistant.components.ruuvi_gateway
aioruuvigateway==0.1.0
@@ -410,7 +410,7 @@ aiowithings==3.0.2
aioymaps==1.2.5
# homeassistant.components.airgradient
airgradient==0.7.1
airgradient==0.8.0
# homeassistant.components.airly
airly==1.1.0
@@ -660,6 +660,9 @@ clearpasspy==1.0.2
# homeassistant.components.sinch
clx-sdk-xms==1.0.0
# homeassistant.components.coinbase
coinbase-advanced-py==1.2.2
# homeassistant.components.coinbase
coinbase==2.1.0
@@ -732,7 +735,7 @@ devolo-home-control-api==0.18.3
devolo-plc-api==1.4.1
# homeassistant.components.chacon_dio
dio-chacon-wifi-api==1.1.0
dio-chacon-wifi-api==1.2.0
# homeassistant.components.directv
directv==0.4.0
@@ -1093,10 +1096,10 @@ hole==0.8.0
holidays==0.53
# homeassistant.components.frontend
home-assistant-frontend==20240806.1
home-assistant-frontend==20240809.0
# homeassistant.components.conversation
home-assistant-intents==2024.7.29
home-assistant-intents==2024.8.7
# homeassistant.components.home_connect
homeconnect==0.8.0
@@ -1351,7 +1354,7 @@ moat-ble==0.1.1
moehlenhoff-alpha2==1.3.1
# homeassistant.components.monzo
monzopy==1.3.0
monzopy==1.3.2
# homeassistant.components.mopeka
mopeka-iot-ble==0.8.0
@@ -1777,7 +1780,7 @@ pycmus==0.1.1
pycomfoconnect==0.5.1
# homeassistant.components.coolmaster
pycoolmasternet-async==0.2.0
pycoolmasternet-async==0.1.5
# homeassistant.components.microsoft
pycsspeechtts==1.0.8
@@ -1786,7 +1789,7 @@ pycsspeechtts==1.0.8
# pycups==1.9.73
# homeassistant.components.daikin
pydaikin==2.13.1
pydaikin==2.13.2
# homeassistant.components.danfoss_air
pydanfossair==0.1.0
@@ -1942,7 +1945,7 @@ pyisy==3.1.14
pyitachip2ir==0.0.7
# homeassistant.components.jvc_projector
pyjvcprojector==1.0.11
pyjvcprojector==1.0.12
# homeassistant.components.kaleidescape
pykaleidescape==1.0.1
@@ -2068,7 +2071,7 @@ pyombi==0.1.10
pyopenuv==2023.02.0
# homeassistant.components.openweathermap
pyopenweathermap==0.0.9
pyopenweathermap==0.1.1
# homeassistant.components.opnsense
pyopnsense==0.4.0
@@ -2280,7 +2283,7 @@ python-gitlab==1.6.0
python-homeassistant-analytics==0.7.0
# homeassistant.components.homewizard
python-homewizard-energy==v6.1.1
python-homewizard-energy==v6.2.0
# homeassistant.components.hp_ilo
python-hpilo==4.4.3
@@ -2915,7 +2918,7 @@ wiffi==1.1.2
wirelesstagpy==0.8.1
# homeassistant.components.wled
wled==0.20.0
wled==0.20.1
# homeassistant.components.wolflink
wolf-comm==0.0.9
@@ -2962,7 +2965,7 @@ yeelight==0.7.14
yeelightsunflower==0.0.10
# homeassistant.components.yolink
yolink-api==0.4.6
yolink-api==0.4.7
# homeassistant.components.youless
youless-api==2.1.2
@@ -2986,7 +2989,7 @@ zeroconf==0.132.2
zeversolar==0.3.1
# homeassistant.components.zha
zha==0.0.28
zha==0.0.30
# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.12
+17 -14
View File
@@ -332,7 +332,7 @@ aioridwell==2024.01.0
aioruckus==0.34
# homeassistant.components.russound_rio
aiorussound==2.2.0
aiorussound==2.2.2
# homeassistant.components.ruuvi_gateway
aioruuvigateway==0.1.0
@@ -392,7 +392,7 @@ aiowithings==3.0.2
aioymaps==1.2.5
# homeassistant.components.airgradient
airgradient==0.7.1
airgradient==0.8.0
# homeassistant.components.airly
airly==1.1.0
@@ -562,6 +562,9 @@ cached_ipaddress==0.3.0
# homeassistant.components.caldav
caldav==1.3.9
# homeassistant.components.coinbase
coinbase-advanced-py==1.2.2
# homeassistant.components.coinbase
coinbase==2.1.0
@@ -625,7 +628,7 @@ devolo-home-control-api==0.18.3
devolo-plc-api==1.4.1
# homeassistant.components.chacon_dio
dio-chacon-wifi-api==1.1.0
dio-chacon-wifi-api==1.2.0
# homeassistant.components.directv
directv==0.4.0
@@ -916,10 +919,10 @@ hole==0.8.0
holidays==0.53
# homeassistant.components.frontend
home-assistant-frontend==20240806.1
home-assistant-frontend==20240809.0
# homeassistant.components.conversation
home-assistant-intents==2024.7.29
home-assistant-intents==2024.8.7
# homeassistant.components.home_connect
homeconnect==0.8.0
@@ -1117,7 +1120,7 @@ moat-ble==0.1.1
moehlenhoff-alpha2==1.3.1
# homeassistant.components.monzo
monzopy==1.3.0
monzopy==1.3.2
# homeassistant.components.mopeka
mopeka-iot-ble==0.8.0
@@ -1427,13 +1430,13 @@ pycfdns==3.0.0
pycomfoconnect==0.5.1
# homeassistant.components.coolmaster
pycoolmasternet-async==0.2.0
pycoolmasternet-async==0.1.5
# homeassistant.components.microsoft
pycsspeechtts==1.0.8
# homeassistant.components.daikin
pydaikin==2.13.1
pydaikin==2.13.2
# homeassistant.components.deconz
pydeconz==116
@@ -1547,7 +1550,7 @@ pyiss==1.0.1
pyisy==3.1.14
# homeassistant.components.jvc_projector
pyjvcprojector==1.0.11
pyjvcprojector==1.0.12
# homeassistant.components.kaleidescape
pykaleidescape==1.0.1
@@ -1652,7 +1655,7 @@ pyoctoprintapi==0.1.12
pyopenuv==2023.02.0
# homeassistant.components.openweathermap
pyopenweathermap==0.0.9
pyopenweathermap==0.1.1
# homeassistant.components.opnsense
pyopnsense==0.4.0
@@ -1804,7 +1807,7 @@ python-fullykiosk==0.0.14
python-homeassistant-analytics==0.7.0
# homeassistant.components.homewizard
python-homewizard-energy==v6.1.1
python-homewizard-energy==v6.2.0
# homeassistant.components.izone
python-izone==1.2.9
@@ -2298,7 +2301,7 @@ whois==0.9.27
wiffi==1.1.2
# homeassistant.components.wled
wled==0.20.0
wled==0.20.1
# homeassistant.components.wolflink
wolf-comm==0.0.9
@@ -2339,7 +2342,7 @@ yalexs==6.4.3
yeelight==0.7.14
# homeassistant.components.yolink
yolink-api==0.4.6
yolink-api==0.4.7
# homeassistant.components.youless
youless-api==2.1.2
@@ -2360,7 +2363,7 @@ zeroconf==0.132.2
zeversolar==0.3.1
# homeassistant.components.zha
zha==0.0.28
zha==0.0.30
# homeassistant.components.zwave_js
zwave-js-server-python==0.57.0
+67 -1
View File
@@ -5,13 +5,14 @@ from homeassistant.components.coinbase.const import (
CONF_EXCHANGE_RATES,
DOMAIN,
)
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
from .const import (
GOOD_CURRENCY_2,
GOOD_EXCHANGE_RATE,
GOOD_EXCHANGE_RATE_2,
MOCK_ACCOUNTS_RESPONSE,
MOCK_ACCOUNTS_RESPONSE_V3,
)
from tests.common import MockConfigEntry
@@ -54,6 +55,33 @@ def mocked_get_accounts(_, **kwargs):
return MockGetAccounts(**kwargs)
class MockGetAccountsV3:
"""Mock accounts with pagination."""
def __init__(self, cursor=""):
"""Init mocked object, forced to return two at a time."""
ids = [account["uuid"] for account in MOCK_ACCOUNTS_RESPONSE_V3]
start = ids.index(cursor) if cursor else 0
has_next = (target_end := start + 2) < len(MOCK_ACCOUNTS_RESPONSE_V3)
end = target_end if has_next else -1
next_cursor = ids[end] if has_next else ids[-1]
self.accounts = {
"accounts": MOCK_ACCOUNTS_RESPONSE_V3[start:end],
"has_next": has_next,
"cursor": next_cursor,
}
def __getitem__(self, item):
"""Handle subscript request."""
return self.accounts[item]
def mocked_get_accounts_v3(_, **kwargs):
"""Return simplified accounts using mock."""
return MockGetAccountsV3(**kwargs)
def mock_get_current_user():
"""Return a simplified mock user."""
return {
@@ -74,6 +102,19 @@ def mock_get_exchange_rates():
}
def mock_get_portfolios():
"""Return a mocked list of Coinbase portfolios."""
return {
"portfolios": [
{
"name": "Default",
"uuid": "123456",
"type": "DEFAULT",
}
]
}
async def init_mock_coinbase(hass, currencies=None, rates=None):
"""Init Coinbase integration for testing."""
config_entry = MockConfigEntry(
@@ -93,3 +134,28 @@ async def init_mock_coinbase(hass, currencies=None, rates=None):
await hass.async_block_till_done()
return config_entry
async def init_mock_coinbase_v3(hass, currencies=None, rates=None):
"""Init Coinbase integration for testing."""
config_entry = MockConfigEntry(
domain=DOMAIN,
entry_id="080272b77a4f80c41b94d7cdc86fd826",
unique_id=None,
title="Test User v3",
data={
CONF_API_KEY: "organizations/123456",
CONF_API_TOKEN: "AbCDeF",
CONF_API_VERSION: "v3",
},
options={
CONF_CURRENCIES: currencies or [],
CONF_EXCHANGE_RATES: rates or [],
},
)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return config_entry
+28
View File
@@ -31,3 +31,31 @@ MOCK_ACCOUNTS_RESPONSE = [
"type": "fiat",
},
]
MOCK_ACCOUNTS_RESPONSE_V3 = [
{
"uuid": "123456789",
"name": "BTC Wallet",
"currency": GOOD_CURRENCY,
"available_balance": {"value": "0.00001", "currency": GOOD_CURRENCY},
"type": "ACCOUNT_TYPE_CRYPTO",
"hold": {"value": "0", "currency": GOOD_CURRENCY},
},
{
"uuid": "abcdefg",
"name": "BTC Vault",
"currency": GOOD_CURRENCY,
"available_balance": {"value": "100.00", "currency": GOOD_CURRENCY},
"type": "ACCOUNT_TYPE_VAULT",
"hold": {"value": "0", "currency": GOOD_CURRENCY},
},
{
"uuid": "987654321",
"name": "USD Wallet",
"currency": GOOD_CURRENCY_2,
"available_balance": {"value": "9.90", "currency": GOOD_CURRENCY_2},
"type": "ACCOUNT_TYPE_FIAT",
"ready": True,
"hold": {"value": "0", "currency": GOOD_CURRENCY_2},
},
]
@@ -3,40 +3,25 @@
dict({
'accounts': list([
dict({
'balance': dict({
'amount': '**REDACTED**',
'currency': 'BTC',
}),
'currency': dict({
'code': 'BTC',
}),
'amount': '**REDACTED**',
'currency': 'BTC',
'id': '**REDACTED**',
'is_vault': False,
'name': 'BTC Wallet',
'type': 'wallet',
}),
dict({
'balance': dict({
'amount': '**REDACTED**',
'currency': 'BTC',
}),
'currency': dict({
'code': 'BTC',
}),
'amount': '**REDACTED**',
'currency': 'BTC',
'id': '**REDACTED**',
'is_vault': True,
'name': 'BTC Vault',
'type': 'vault',
}),
dict({
'balance': dict({
'amount': '**REDACTED**',
'currency': 'USD',
}),
'currency': dict({
'code': 'USD',
}),
'amount': '**REDACTED**',
'currency': 'USD',
'id': '**REDACTED**',
'is_vault': False,
'name': 'USD Wallet',
'type': 'fiat',
}),
]),
'entry': dict({
+84 -6
View File
@@ -14,15 +14,18 @@ from homeassistant.components.coinbase.const import (
CONF_EXCHANGE_RATES,
DOMAIN,
)
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from .common import (
init_mock_coinbase,
init_mock_coinbase_v3,
mock_get_current_user,
mock_get_exchange_rates,
mock_get_portfolios,
mocked_get_accounts,
mocked_get_accounts_v3,
)
from .const import BAD_CURRENCY, BAD_EXCHANGE_RATE, GOOD_CURRENCY, GOOD_EXCHANGE_RATE
@@ -53,16 +56,17 @@ async def test_form(hass: HomeAssistant) -> None:
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_API_KEY: "123456",
CONF_API_TOKEN: "AbCDeF",
},
{CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"},
)
await hass.async_block_till_done()
assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["title"] == "Test User"
assert result2["data"] == {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"}
assert result2["data"] == {
CONF_API_KEY: "123456",
CONF_API_TOKEN: "AbCDeF",
CONF_API_VERSION: "v2",
}
assert len(mock_setup_entry.mock_calls) == 1
@@ -314,3 +318,77 @@ async def test_option_catch_all_exception(hass: HomeAssistant) -> None:
assert result2["type"] is FlowResultType.FORM
assert result2["errors"] == {"base": "unknown"}
async def test_form_v3(hass: HomeAssistant) -> None:
"""Test we get the form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {}
with (
patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3),
patch(
"coinbase.rest.RESTClient.get_portfolios",
return_value=mock_get_portfolios(),
),
patch(
"coinbase.rest.RESTBase.get",
return_value={"data": mock_get_exchange_rates()},
),
patch(
"homeassistant.components.coinbase.async_setup_entry",
return_value=True,
) as mock_setup_entry,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_API_KEY: "organizations/123456", CONF_API_TOKEN: "AbCDeF"},
)
await hass.async_block_till_done()
assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["title"] == "Default"
assert result2["data"] == {
CONF_API_KEY: "organizations/123456",
CONF_API_TOKEN: "AbCDeF",
CONF_API_VERSION: "v3",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_option_form_v3(hass: HomeAssistant) -> None:
"""Test we handle a good wallet currency option."""
with (
patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3),
patch(
"coinbase.rest.RESTClient.get_portfolios",
return_value=mock_get_portfolios(),
),
patch(
"coinbase.rest.RESTBase.get",
return_value={"data": mock_get_exchange_rates()},
),
patch(
"homeassistant.components.coinbase.update_listener"
) as mock_update_listener,
):
config_entry = await init_mock_coinbase_v3(hass)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(config_entry.entry_id)
await hass.async_block_till_done()
result2 = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_CURRENCIES: [GOOD_CURRENCY],
CONF_EXCHANGE_RATES: [GOOD_EXCHANGE_RATE],
CONF_EXCHANGE_PRECISION: 5,
},
)
assert result2["type"] is FlowResultType.CREATE_ENTRY
await hass.async_block_till_done()
assert len(mock_update_listener.mock_calls) == 1
@@ -7,6 +7,10 @@
"1": {
"title": "Home Assistant (mydoorbird_motion)",
"value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_motion?token=01J2F4B97Y7P1SARXEJ6W07EKD"
},
"2": {
"title": "externally added event",
"value": "http://127.0.0.1/"
}
}
}
+1 -1
View File
@@ -49,4 +49,4 @@ async def test_reset_favorites_button(
DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True
)
assert hass.states.get(reset_entity_id).state != STATE_UNKNOWN
assert doorbird_entry.api.delete_favorite.call_count == 2
assert doorbird_entry.api.delete_favorite.call_count == 3
@@ -219,6 +219,101 @@ async def test_migrate_hourly_gas_to_mbus(
)
async def test_migrate_gas_with_devid_to_mbus(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
device_registry: dr.DeviceRegistry,
dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock],
) -> None:
"""Test migration of unique_id."""
(connection_factory, transport, protocol) = dsmr_connection_fixture
mock_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="/dev/ttyUSB0",
data={
"port": "/dev/ttyUSB0",
"dsmr_version": "5B",
"serial_id": "1234",
"serial_id_gas": "37464C4F32313139303333373331",
},
options={
"time_between_update": 0,
},
)
mock_entry.add_to_hass(hass)
old_unique_id = "37464C4F32313139303333373331_belgium_5min_gas_meter_reading"
device = device_registry.async_get_or_create(
config_entry_id=mock_entry.entry_id,
identifiers={(DOMAIN, "37464C4F32313139303333373331")},
name="Gas Meter",
)
await hass.async_block_till_done()
entity: er.RegistryEntry = entity_registry.async_get_or_create(
suggested_object_id="gas_meter_reading",
disabled_by=None,
domain=SENSOR_DOMAIN,
platform=DOMAIN,
device_id=device.id,
unique_id=old_unique_id,
config_entry=mock_entry,
)
assert entity.unique_id == old_unique_id
await hass.async_block_till_done()
telegram = Telegram()
telegram.add(
MBUS_DEVICE_TYPE,
CosemObject((0, 1), [{"value": "003", "unit": ""}]),
"MBUS_DEVICE_TYPE",
)
telegram.add(
MBUS_EQUIPMENT_IDENTIFIER,
CosemObject(
(0, 1),
[{"value": "37464C4F32313139303333373331", "unit": ""}],
),
"MBUS_EQUIPMENT_IDENTIFIER",
)
telegram.add(
MBUS_METER_READING,
MBusObject(
(0, 1),
[
{"value": datetime.datetime.fromtimestamp(1551642213)},
{"value": Decimal(745.695), "unit": "m3"},
],
),
"MBUS_METER_READING",
)
assert await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
telegram_callback = connection_factory.call_args_list[0][0][2]
# simulate a telegram pushed from the smartmeter and parsed by dsmr_parser
telegram_callback(telegram)
# after receiving telegram entities need to have the chance to be created
await hass.async_block_till_done()
assert (
entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id)
is None
)
assert (
entity_registry.async_get_entity_id(
SENSOR_DOMAIN, DOMAIN, "37464C4F32313139303333373331"
)
== "sensor.gas_meter_reading"
)
async def test_migrate_gas_to_mbus_exists(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
+37 -2
View File
@@ -122,7 +122,7 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No
"services": [
{"service": "test_service1"},
{
"service": "test_service2",
"action": "test_service2",
"data": {
"target": "unnamed device",
"data": {"test": "message", "default": "default"},
@@ -202,6 +202,41 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No
)
async def test_invalid_configuration(
hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture
) -> None:
"""Test failing to set up group with an invalid configuration."""
assert await async_setup_component(
hass,
"group",
{},
)
await hass.async_block_till_done()
group_setup = [
{
"platform": "group",
"name": "My invalid notification group",
"services": [
{
"service": "test_service1",
"action": "test_service2",
"data": {
"target": "unnamed device",
"data": {"test": "message", "default": "default"},
},
},
],
}
]
await help_setup_notify(hass, tmp_path, {"service1": 1, "service2": 2}, group_setup)
assert not hass.services.has_service("notify", "my_invalid_notification_group")
assert (
"Invalid config for 'notify' from integration 'group':"
" Cannot specify both 'service' and 'action'." in caplog.text
)
async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None:
"""Verify we can reload the notify service."""
assert await async_setup_component(
@@ -219,7 +254,7 @@ async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None:
{
"name": "group_notify",
"platform": "group",
"services": [{"service": "test_service1"}],
"services": [{"action": "test_service1"}],
}
],
)
-32
View File
@@ -25,16 +25,6 @@ async def dimmable_light_node_fixture(
)
@pytest.fixture(name="microwave_oven_node")
async def microwave_oven_node_fixture(
hass: HomeAssistant, matter_client: MagicMock
) -> MatterNode:
"""Fixture for a microwave oven node."""
return await setup_integration_with_node_fixture(
hass, "microwave-oven", matter_client
)
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_mode_select_entities(
@@ -87,28 +77,6 @@ async def test_mode_select_entities(
# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_microwave_select_entities(
hass: HomeAssistant,
matter_client: MagicMock,
microwave_oven_node: MatterNode,
) -> None:
"""Test select entities are created for the MicrowaveOvenMode cluster attributes."""
state = hass.states.get("select.microwave_oven_mode")
assert state
assert state.state == "Normal"
assert state.attributes["options"] == [
"Normal",
"Defrost",
]
# name should just be Mode (from the translation key)
assert state.attributes["friendly_name"] == "Microwave Oven Mode"
set_node_attribute(microwave_oven_node, 1, 94, 1, 1)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("select.microwave_oven_mode")
assert state.state == "Defrost"
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_attribute_select_entities(
hass: HomeAssistant,
@@ -45,7 +45,7 @@ CONFIG = {
VALID_YAML_CONFIG = {CONF_API_KEY: "foo"}
def _create_mocked_owm_client(is_valid: bool):
def _create_mocked_owm_factory(is_valid: bool):
current_weather = CurrentWeather(
date_time=datetime.fromtimestamp(1714063536, tz=UTC),
temperature=6.84,
@@ -118,18 +118,18 @@ def _create_mocked_owm_client(is_valid: bool):
def mock_owm_client():
"""Mock config_flow OWMClient."""
with patch(
"homeassistant.components.openweathermap.OWMClient",
) as owm_client_mock:
yield owm_client_mock
"homeassistant.components.openweathermap.create_owm_client",
) as mock:
yield mock
@pytest.fixture(name="config_flow_owm_client_mock")
def mock_config_flow_owm_client():
"""Mock config_flow OWMClient."""
with patch(
"homeassistant.components.openweathermap.utils.OWMClient",
) as config_flow_owm_client_mock:
yield config_flow_owm_client_mock
"homeassistant.components.openweathermap.utils.create_owm_client",
) as mock:
yield mock
async def test_successful_config_flow(
@@ -138,7 +138,7 @@ async def test_successful_config_flow(
config_flow_owm_client_mock,
) -> None:
"""Test that the form is served with valid input."""
mock = _create_mocked_owm_client(True)
mock = _create_mocked_owm_factory(True)
owm_client_mock.return_value = mock
config_flow_owm_client_mock.return_value = mock
@@ -177,7 +177,7 @@ async def test_abort_config_flow(
config_flow_owm_client_mock,
) -> None:
"""Test that the form is served with same data."""
mock = _create_mocked_owm_client(True)
mock = _create_mocked_owm_factory(True)
owm_client_mock.return_value = mock
config_flow_owm_client_mock.return_value = mock
@@ -200,7 +200,7 @@ async def test_config_flow_options_change(
config_flow_owm_client_mock,
) -> None:
"""Test that the options form."""
mock = _create_mocked_owm_client(True)
mock = _create_mocked_owm_factory(True)
owm_client_mock.return_value = mock
config_flow_owm_client_mock.return_value = mock
@@ -261,7 +261,7 @@ async def test_form_invalid_api_key(
config_flow_owm_client_mock,
) -> None:
"""Test that the form is served with no input."""
config_flow_owm_client_mock.return_value = _create_mocked_owm_client(False)
config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(False)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
)
@@ -269,7 +269,7 @@ async def test_form_invalid_api_key(
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {"base": "invalid_api_key"}
config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True)
config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=CONFIG
)
@@ -282,7 +282,7 @@ async def test_form_api_call_error(
config_flow_owm_client_mock,
) -> None:
"""Test setting up with api call error."""
config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True)
config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True)
config_flow_owm_client_mock.side_effect = RequestError("oops")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
+7 -7
View File
@@ -748,7 +748,7 @@ def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None:
session.add(States(state="on"))
session.commit()
migration.rebuild_sqlite_table(session_maker, engine, States)
assert migration.rebuild_sqlite_table(session_maker, engine, States) is True
with session_scope(session=session_maker()) as session:
assert session.query(States).count() == 1
@@ -776,13 +776,13 @@ def test_rebuild_sqlite_states_table_missing_fails(
session.connection().execute(text("DROP TABLE states"))
session.commit()
migration.rebuild_sqlite_table(session_maker, engine, States)
assert migration.rebuild_sqlite_table(session_maker, engine, States) is False
assert "Error recreating SQLite table states" in caplog.text
caplog.clear()
# Now rebuild the events table to make sure the database did not
# get corrupted
migration.rebuild_sqlite_table(session_maker, engine, Events)
assert migration.rebuild_sqlite_table(session_maker, engine, Events) is True
with session_scope(session=session_maker()) as session:
assert session.query(Events).count() == 1
@@ -812,7 +812,7 @@ def test_rebuild_sqlite_states_table_extra_columns(
text("ALTER TABLE states ADD COLUMN extra_column TEXT")
)
migration.rebuild_sqlite_table(session_maker, engine, States)
assert migration.rebuild_sqlite_table(session_maker, engine, States) is True
assert "Error recreating SQLite table states" not in caplog.text
with session_scope(session=session_maker()) as session:
@@ -905,7 +905,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None:
for table, column in constraints_to_recreate
for dropped_constraint in migration._drop_foreign_key_constraints(
session_maker, engine, table, column
)
)[1]
]
assert dropped_constraints_1 == expected_dropped_constraints[db_engine]
@@ -917,7 +917,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None:
for table, column in constraints_to_recreate
for dropped_constraint in migration._drop_foreign_key_constraints(
session_maker, engine, table, column
)
)[1]
]
assert dropped_constraints_2 == []
@@ -936,7 +936,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None:
for table, column in constraints_to_recreate
for dropped_constraint in migration._drop_foreign_key_constraints(
session_maker, engine, table, column
)
)[1]
]
assert dropped_constraints_3 == expected_dropped_constraints[db_engine]
@@ -7,6 +7,7 @@ from unittest.mock import patch
import pytest
from sqlalchemy import create_engine, inspect
from sqlalchemy.exc import OperationalError, SQLAlchemyError
from sqlalchemy.orm import Session
from homeassistant.components import recorder
@@ -444,3 +445,348 @@ async def test_migrate_can_resume_ix_states_event_id_removed(
assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None
await hass.async_stop()
@pytest.mark.usefixtures("skip_by_db_engine")
@pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
@pytest.mark.parametrize("enable_migrate_event_ids", [True])
@pytest.mark.parametrize("persistent_database", [True])
@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage
async def test_out_of_disk_space_while_rebuild_states_table(
async_test_recorder: RecorderInstanceGenerator,
caplog: pytest.LogCaptureFixture,
recorder_db_url: str,
) -> None:
"""Test that we can recover from out of disk space while rebuilding the states table.
This case tests the migration still happens if
ix_states_event_id is removed from the states table.
"""
importlib.import_module(SCHEMA_MODULE)
old_db_schema = sys.modules[SCHEMA_MODULE]
now = dt_util.utcnow()
one_second_past = now - timedelta(seconds=1)
mock_state = State(
"sensor.test",
"old",
{"last_reset": now.isoformat()},
last_changed=one_second_past,
last_updated=now,
)
state_changed_event = Event(
EVENT_STATE_CHANGED,
{
"entity_id": "sensor.test",
"old_state": None,
"new_state": mock_state,
},
EventOrigin.local,
time_fired_timestamp=now.timestamp(),
)
custom_event = Event(
"custom_event",
{"entity_id": "sensor.custom"},
EventOrigin.local,
time_fired_timestamp=now.timestamp(),
)
number_of_migrations = 5
def _get_event_id_foreign_keys():
assert instance.engine is not None
return next(
(
fk # type: ignore[misc]
for fk in inspect(instance.engine).get_foreign_keys("states")
if fk["constrained_columns"] == ["event_id"]
),
None,
)
def _get_states_index_names():
with session_scope(hass=hass) as session:
return inspect(session.connection()).get_indexes("states")
with (
patch.object(recorder, "db_schema", old_db_schema),
patch.object(
recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
),
patch.object(core, "StatesMeta", old_db_schema.StatesMeta),
patch.object(core, "EventTypes", old_db_schema.EventTypes),
patch.object(core, "EventData", old_db_schema.EventData),
patch.object(core, "States", old_db_schema.States),
patch.object(core, "Events", old_db_schema.Events),
patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"),
patch(
"homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids"
),
):
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
def _add_data():
with session_scope(hass=hass) as session:
session.add(old_db_schema.Events.from_event(custom_event))
session.add(old_db_schema.States.from_event(state_changed_event))
await instance.async_add_executor_job(_add_data)
await hass.async_block_till_done()
await instance.async_block_till_done()
await instance.async_add_executor_job(
migration._drop_index,
instance.get_session,
"states",
"ix_states_event_id",
)
states_indexes = await instance.async_add_executor_job(
_get_states_index_names
)
states_index_names = {index["name"] for index in states_indexes}
assert instance.use_legacy_events_index is True
assert (
await instance.async_add_executor_job(_get_event_id_foreign_keys)
is not None
)
await hass.async_stop()
await hass.async_block_till_done()
assert "ix_states_entity_id_last_updated_ts" in states_index_names
# Simulate out of disk space while rebuilding the states table by
# - patching CreateTable to raise SQLAlchemyError for SQLite
# - patching DropConstraint to raise InternalError for MySQL and PostgreSQL
with (
patch(
"homeassistant.components.recorder.migration.CreateTable",
side_effect=SQLAlchemyError,
),
patch(
"homeassistant.components.recorder.migration.DropConstraint",
side_effect=OperationalError(
None, None, OSError("No space left on device")
),
),
):
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
# We need to wait for all the migration tasks to complete
# before we can check the database.
for _ in range(number_of_migrations):
await instance.async_block_till_done()
await async_wait_recording_done(hass)
states_indexes = await instance.async_add_executor_job(
_get_states_index_names
)
states_index_names = {index["name"] for index in states_indexes}
assert instance.use_legacy_events_index is True
assert "Error recreating SQLite table states" in caplog.text
assert await instance.async_add_executor_job(_get_event_id_foreign_keys)
await hass.async_stop()
# Now run it again to verify the table rebuild tries again
caplog.clear()
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
# We need to wait for all the migration tasks to complete
# before we can check the database.
for _ in range(number_of_migrations):
await instance.async_block_till_done()
await async_wait_recording_done(hass)
states_indexes = await instance.async_add_executor_job(_get_states_index_names)
states_index_names = {index["name"] for index in states_indexes}
assert instance.use_legacy_events_index is False
assert "ix_states_entity_id_last_updated_ts" not in states_index_names
assert "ix_states_event_id" not in states_index_names
assert "Rebuilding SQLite table states finished" in caplog.text
assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None
await hass.async_stop()
@pytest.mark.usefixtures("skip_by_db_engine")
@pytest.mark.skip_on_db_engine(["sqlite"])
@pytest.mark.parametrize("enable_migrate_event_ids", [True])
@pytest.mark.parametrize("persistent_database", [True])
@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage
async def test_out_of_disk_space_while_removing_foreign_key(
async_test_recorder: RecorderInstanceGenerator,
caplog: pytest.LogCaptureFixture,
recorder_db_url: str,
) -> None:
"""Test that we can recover from out of disk space while removing the foreign key.
This case tests the migration still happens if
ix_states_event_id is removed from the states table.
"""
importlib.import_module(SCHEMA_MODULE)
old_db_schema = sys.modules[SCHEMA_MODULE]
now = dt_util.utcnow()
one_second_past = now - timedelta(seconds=1)
mock_state = State(
"sensor.test",
"old",
{"last_reset": now.isoformat()},
last_changed=one_second_past,
last_updated=now,
)
state_changed_event = Event(
EVENT_STATE_CHANGED,
{
"entity_id": "sensor.test",
"old_state": None,
"new_state": mock_state,
},
EventOrigin.local,
time_fired_timestamp=now.timestamp(),
)
custom_event = Event(
"custom_event",
{"entity_id": "sensor.custom"},
EventOrigin.local,
time_fired_timestamp=now.timestamp(),
)
number_of_migrations = 5
def _get_event_id_foreign_keys():
assert instance.engine is not None
return next(
(
fk # type: ignore[misc]
for fk in inspect(instance.engine).get_foreign_keys("states")
if fk["constrained_columns"] == ["event_id"]
),
None,
)
def _get_states_index_names():
with session_scope(hass=hass) as session:
return inspect(session.connection()).get_indexes("states")
with (
patch.object(recorder, "db_schema", old_db_schema),
patch.object(
recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
),
patch.object(core, "StatesMeta", old_db_schema.StatesMeta),
patch.object(core, "EventTypes", old_db_schema.EventTypes),
patch.object(core, "EventData", old_db_schema.EventData),
patch.object(core, "States", old_db_schema.States),
patch.object(core, "Events", old_db_schema.Events),
patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"),
patch(
"homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids"
),
):
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
def _add_data():
with session_scope(hass=hass) as session:
session.add(old_db_schema.Events.from_event(custom_event))
session.add(old_db_schema.States.from_event(state_changed_event))
await instance.async_add_executor_job(_add_data)
await hass.async_block_till_done()
await instance.async_block_till_done()
await instance.async_add_executor_job(
migration._drop_index,
instance.get_session,
"states",
"ix_states_event_id",
)
states_indexes = await instance.async_add_executor_job(
_get_states_index_names
)
states_index_names = {index["name"] for index in states_indexes}
assert instance.use_legacy_events_index is True
assert (
await instance.async_add_executor_job(_get_event_id_foreign_keys)
is not None
)
await hass.async_stop()
await hass.async_block_till_done()
assert "ix_states_entity_id_last_updated_ts" in states_index_names
# Simulate out of disk space while removing the foreign key from the states table by
# - patching DropConstraint to raise InternalError for MySQL and PostgreSQL
with (
patch(
"homeassistant.components.recorder.migration.DropConstraint",
side_effect=OperationalError(
None, None, OSError("No space left on device")
),
),
):
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
# We need to wait for all the migration tasks to complete
# before we can check the database.
for _ in range(number_of_migrations):
await instance.async_block_till_done()
await async_wait_recording_done(hass)
states_indexes = await instance.async_add_executor_job(
_get_states_index_names
)
states_index_names = {index["name"] for index in states_indexes}
assert instance.use_legacy_events_index is True
assert await instance.async_add_executor_job(_get_event_id_foreign_keys)
await hass.async_stop()
# Now run it again to verify the table rebuild tries again
caplog.clear()
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
# We need to wait for all the migration tasks to complete
# before we can check the database.
for _ in range(number_of_migrations):
await instance.async_block_till_done()
await async_wait_recording_done(hass)
states_indexes = await instance.async_add_executor_job(_get_states_index_names)
states_index_names = {index["name"] for index in states_indexes}
assert instance.use_legacy_events_index is False
assert "ix_states_entity_id_last_updated_ts" not in states_index_names
assert "ix_states_event_id" not in states_index_names
assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None
await hass.async_stop()
+14
View File
@@ -4,9 +4,15 @@ from unittest.mock import patch
from syrupy import SnapshotAssertion
from homeassistant.components.application_credentials import (
ClientCredential,
async_import_client_credential,
)
from homeassistant.components.tesla_fleet.const import CLIENT_ID, DOMAIN
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
@@ -18,6 +24,14 @@ async def setup_platform(
) -> None:
"""Set up the Tesla Fleet platform."""
assert await async_setup_component(hass, "application_credentials", {})
await async_import_client_credential(
hass,
DOMAIN,
ClientCredential(CLIENT_ID, "", "Home Assistant"),
DOMAIN,
)
config_entry.add_to_hass(hass)
if platforms is None:
-19
View File
@@ -10,14 +10,7 @@ from unittest.mock import AsyncMock, patch
import jwt
import pytest
from homeassistant.components.application_credentials import (
ClientCredential,
async_import_client_credential,
)
from homeassistant.components.tesla_fleet.application_credentials import CLIENT_ID
from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from .const import LIVE_STATUS, PRODUCTS, SITE_INFO, VEHICLE_DATA, VEHICLE_ONLINE
@@ -71,18 +64,6 @@ def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry:
)
@pytest.fixture(autouse=True)
async def setup_credentials(hass: HomeAssistant) -> None:
"""Fixture to setup credentials."""
assert await async_setup_component(hass, "application_credentials", {})
await async_import_client_credential(
hass,
DOMAIN,
ClientCredential(CLIENT_ID, ""),
DOMAIN,
)
@pytest.fixture(autouse=True)
def mock_products() -> Generator[AsyncMock]:
"""Mock Tesla Fleet Api products method."""
@@ -5,12 +5,13 @@ from urllib.parse import parse_qs, urlparse
import pytest
from homeassistant.components.tesla_fleet.application_credentials import (
from homeassistant.components.tesla_fleet.const import (
AUTHORIZE_URL,
CLIENT_ID,
DOMAIN,
SCOPES,
TOKEN_URL,
)
from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
+22 -1
View File
@@ -3,6 +3,7 @@
import asyncio
import typing
from unittest.mock import AsyncMock, Mock, patch
import zoneinfo
import pytest
from zigpy.application import ControllerApplication
@@ -16,7 +17,7 @@ from homeassistant.components.zha.const import (
CONF_USB_PATH,
DOMAIN,
)
from homeassistant.components.zha.helpers import get_zha_data
from homeassistant.components.zha.helpers import get_zha_data, get_zha_gateway
from homeassistant.const import (
EVENT_HOMEASSISTANT_STOP,
MAJOR_VERSION,
@@ -288,3 +289,23 @@ async def test_shutdown_on_ha_stop(
await hass.async_block_till_done()
assert len(mock_shutdown.mock_calls) == 1
async def test_timezone_update(
hass: HomeAssistant,
config_entry: MockConfigEntry,
mock_zigpy_connect: ControllerApplication,
) -> None:
"""Test that the ZHA gateway timezone is updated when HA timezone changes."""
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
gateway = get_zha_gateway(hass)
assert hass.config.time_zone == "US/Pacific"
assert gateway.config.local_timezone == zoneinfo.ZoneInfo("US/Pacific")
await hass.config.async_update(time_zone="America/New_York")
assert hass.config.time_zone == "America/New_York"
assert gateway.config.local_timezone == zoneinfo.ZoneInfo("America/New_York")