diff --git a/homeassistant/components/recorder/auto_repairs/statistics/duplicates.py b/homeassistant/components/recorder/auto_repairs/statistics/duplicates.py
index b73744ef0d1..f203d6ab69a 100644
--- a/homeassistant/components/recorder/auto_repairs/statistics/duplicates.py
+++ b/homeassistant/components/recorder/auto_repairs/statistics/duplicates.py
@@ -17,7 +17,7 @@ from homeassistant.helpers.json import JSONEncoder
 from homeassistant.helpers.storage import STORAGE_DIR
 from homeassistant.util import dt as dt_util
 
-from ...const import SQLITE_MAX_BIND_VARS
+from ...const import DEFAULT_MAX_BIND_VARS
 from ...db_schema import Statistics, StatisticsBase, StatisticsMeta, StatisticsShortTerm
 from ...util import database_job_retry_wrapper, execute
 
@@ -61,7 +61,7 @@ def _find_duplicates(
         )
         .filter(subquery.c.is_duplicate == 1)
         .order_by(table.metadata_id, table.start, table.id.desc())
-        .limit(1000 * SQLITE_MAX_BIND_VARS)
+        .limit(1000 * DEFAULT_MAX_BIND_VARS)
     )
     duplicates = execute(query)
     original_as_dict = {}
@@ -125,10 +125,10 @@ def _delete_duplicates_from_table(
         if not duplicate_ids:
             break
         all_non_identical_duplicates.extend(non_identical_duplicates)
-        for i in range(0, len(duplicate_ids), SQLITE_MAX_BIND_VARS):
+        for i in range(0, len(duplicate_ids), DEFAULT_MAX_BIND_VARS):
             deleted_rows = (
                 session.query(table)
-                .filter(table.id.in_(duplicate_ids[i : i + SQLITE_MAX_BIND_VARS]))
+                .filter(table.id.in_(duplicate_ids[i : i + DEFAULT_MAX_BIND_VARS]))
                 .delete(synchronize_session=False)
             )
             total_deleted_rows += deleted_rows
@@ -205,7 +205,7 @@ def _find_statistics_meta_duplicates(session: Session) -> list[int]:
         )
         .filter(subquery.c.is_duplicate == 1)
         .order_by(StatisticsMeta.statistic_id, StatisticsMeta.id.desc())
-        .limit(1000 * SQLITE_MAX_BIND_VARS)
+        .limit(1000 * DEFAULT_MAX_BIND_VARS)
     )
     duplicates = execute(query)
     statistic_id = None
@@ -230,11 +230,11 @@ def _delete_statistics_meta_duplicates(session: Session) -> int:
         duplicate_ids = _find_statistics_meta_duplicates(session)
         if not duplicate_ids:
             break
-        for i in range(0, len(duplicate_ids), SQLITE_MAX_BIND_VARS):
+        for i in range(0, len(duplicate_ids), DEFAULT_MAX_BIND_VARS):
             deleted_rows = (
                 session.query(StatisticsMeta)
                 .filter(
-                    StatisticsMeta.id.in_(duplicate_ids[i : i + SQLITE_MAX_BIND_VARS])
+                    StatisticsMeta.id.in_(duplicate_ids[i : i + DEFAULT_MAX_BIND_VARS])
                 )
                 .delete(synchronize_session=False)
             )
diff --git a/homeassistant/components/recorder/const.py b/homeassistant/components/recorder/const.py
index 409641e54c9..c91845e8436 100644
--- a/homeassistant/components/recorder/const.py
+++ b/homeassistant/components/recorder/const.py
@@ -32,16 +32,6 @@ MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG = 256 * 1024**2
 
 # The maximum number of rows (events) we purge in one delete statement
 
-# sqlite3 has a limit of 999 until version 3.32.0
-# in https://github.com/sqlite/sqlite/commit/efdba1a8b3c6c967e7fae9c1989c40d420ce64cc
-# We can increase this back to 1000 once most
-# have upgraded their sqlite version
-SQLITE_MAX_BIND_VARS = 998
-
-# The maximum bind vars for sqlite 3.32.0 and above, but
-# capped at 4000 to avoid performance issues
-SQLITE_MODERN_MAX_BIND_VARS = 4000
-
 DEFAULT_MAX_BIND_VARS = 4000
 
 DB_WORKER_PREFIX = "DbWorker"
diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py
index fee72ce273f..5a405061a94 100644
--- a/homeassistant/components/recorder/core.py
+++ b/homeassistant/components/recorder/core.py
@@ -52,6 +52,7 @@ from homeassistant.util.event_type import EventType
 from . import migration, statistics
 from .const import (
     DB_WORKER_PREFIX,
+    DEFAULT_MAX_BIND_VARS,
     DOMAIN,
     KEEPALIVE_TIME,
     LAST_REPORTED_SCHEMA_VERSION,
@@ -61,7 +62,6 @@ from .const import (
     MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG,
     MYSQLDB_PYMYSQL_URL_PREFIX,
     MYSQLDB_URL_PREFIX,
-    SQLITE_MAX_BIND_VARS,
     SQLITE_URL_PREFIX,
     SupportedDialect,
 )
@@ -230,12 +230,9 @@ class Recorder(threading.Thread):
         self._dialect_name: SupportedDialect | None = None
         self.enabled = True
 
-        # For safety we default to the lowest value for max_bind_vars
-        # of all the DB types (SQLITE_MAX_BIND_VARS).
-        #
         # We update the value once we connect to the DB
         # and determine what is actually supported.
-        self.max_bind_vars = SQLITE_MAX_BIND_VARS
+        self.max_bind_vars = DEFAULT_MAX_BIND_VARS
 
     @property
     def backlog(self) -> int:
diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py
index 632553838c2..55364863f7e 100644
--- a/homeassistant/components/recorder/util.py
+++ b/homeassistant/components/recorder/util.py
@@ -36,14 +36,7 @@ from homeassistant.helpers.recorder import (  # noqa: F401
 )
 import homeassistant.util.dt as dt_util
 
-from .const import (
-    DEFAULT_MAX_BIND_VARS,
-    DOMAIN,
-    SQLITE_MAX_BIND_VARS,
-    SQLITE_MODERN_MAX_BIND_VARS,
-    SQLITE_URL_PREFIX,
-    SupportedDialect,
-)
+from .const import DEFAULT_MAX_BIND_VARS, DOMAIN, SQLITE_URL_PREFIX, SupportedDialect
 from .db_schema import (
     TABLE_RECORDER_RUNS,
     TABLE_SCHEMA_CHANGES,
@@ -96,7 +89,6 @@ MARIADB_WITH_FIXED_IN_QUERIES_108 = _simple_version("10.8.4")
 MIN_VERSION_MYSQL = _simple_version("8.0.0")
 MIN_VERSION_PGSQL = _simple_version("12.0")
 MIN_VERSION_SQLITE = _simple_version("3.40.1")
-MIN_VERSION_SQLITE_MODERN_BIND_VARS = _simple_version("3.40.1")
 
 
 # This is the maximum time after the recorder ends the session
@@ -473,7 +465,6 @@ def setup_connection_for_dialect(
     version: AwesomeVersion | None = None
     slow_range_in_select = False
     if dialect_name == SupportedDialect.SQLITE:
-        max_bind_vars = SQLITE_MAX_BIND_VARS
         if first_connection:
             old_isolation = dbapi_connection.isolation_level  # type: ignore[attr-defined]
             dbapi_connection.isolation_level = None  # type: ignore[attr-defined]
@@ -491,9 +482,6 @@ def setup_connection_for_dialect(
                     version or version_string, "SQLite", MIN_VERSION_SQLITE
                 )
 
-            if version and version > MIN_VERSION_SQLITE_MODERN_BIND_VARS:
-                max_bind_vars = SQLITE_MODERN_MAX_BIND_VARS
-
         # The upper bound on the cache size is approximately 16MiB of memory
         execute_on_connection(dbapi_connection, "PRAGMA cache_size = -16384")
 
@@ -512,7 +500,6 @@ def setup_connection_for_dialect(
         execute_on_connection(dbapi_connection, "PRAGMA foreign_keys=ON")
 
     elif dialect_name == SupportedDialect.MYSQL:
-        max_bind_vars = DEFAULT_MAX_BIND_VARS
         execute_on_connection(dbapi_connection, "SET session wait_timeout=28800")
         if first_connection:
             result = query_on_connection(dbapi_connection, "SELECT VERSION()")
@@ -553,7 +540,6 @@ def setup_connection_for_dialect(
         # Ensure all times are using UTC to avoid issues with daylight savings
         execute_on_connection(dbapi_connection, "SET time_zone = '+00:00'")
     elif dialect_name == SupportedDialect.POSTGRESQL:
-        max_bind_vars = DEFAULT_MAX_BIND_VARS
         # PostgreSQL does not support a skip/loose index scan so its
         # also slow for large distinct queries:
         # https://wiki.postgresql.org/wiki/Loose_indexscan
@@ -580,7 +566,7 @@
         dialect=SupportedDialect(dialect_name),
         version=version,
         optimizer=DatabaseOptimizer(slow_range_in_select=slow_range_in_select),
-        max_bind_vars=max_bind_vars,
+        max_bind_vars=DEFAULT_MAX_BIND_VARS,
     )
 
 
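Note (not part of the patch): collapsing SQLITE_MAX_BIND_VARS and SQLITE_MODERN_MAX_BIND_VARS into DEFAULT_MAX_BIND_VARS works because the minimum supported SQLite is already 3.40.1 (MIN_VERSION_SQLITE above), well past the 3.32.0 release that lifted the old 999-bind-variable limit, so every dialect can share the same 4000 default (a value that was already capped at 4000 to avoid performance issues, per the removed comment). The sketch below only illustrates the chunking pattern the touched call sites rely on; chunk_ids is a hypothetical helper, not code from this diff.

    from collections.abc import Iterator, Sequence

    DEFAULT_MAX_BIND_VARS = 4000  # mirrors homeassistant/components/recorder/const.py

    def chunk_ids(
        ids: Sequence[int], size: int = DEFAULT_MAX_BIND_VARS
    ) -> Iterator[Sequence[int]]:
        """Yield slices of ids small enough for a single IN (...) clause."""
        for start in range(0, len(ids), size):
            yield ids[start : start + size]

    # Each chunk then feeds one statement, for example
    #   session.query(table).filter(table.id.in_(chunk)).delete(synchronize_session=False)
    # so no single query ever binds more than DEFAULT_MAX_BIND_VARS parameters.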