Mirror of https://github.com/home-assistant/core.git, synced 2025-06-25 01:21:51 +02:00
Improve formatting in core files (#135256)
* Adjust core files formatting
* Adjust translations script
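Nearly every hunk below is one of a few mechanical rewrites applied by the updated formatter. A minimal sketch of the two most common ones, using made-up names ("msg", "step") rather than code from the commit:

    step = 3

    # Adjacent string literals are concatenated at compile time, so a split
    # literal that now fits on one line can be merged without changing the value.
    msg = (
        "File integrations.json is not up to date. "
        "Run python3 -m script.hassfest"
    )
    assert msg == (
        "File integrations.json is not up to date. Run python3 -m script.hassfest"
    )

    # Expressions inside f-string replacement fields now get normal PEP 8
    # spacing around binary operators; the rendered text is identical.
    assert f"step {step+1}" == f"step {step + 1}"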
@@ -119,7 +119,7 @@ def _extract_backup(
         Path(
             tempdir,
             "extracted",
-            f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
+            f"homeassistant.tar{'.gz' if backup_meta['compressed'] else ''}",
         ),
         gzip=backup_meta["compressed"],
         key=password_to_key(restore_content.password)
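The removed line only parses on Python 3.12+, where PEP 701 lets an f-string reuse its own quote character inside a replacement field; the rewritten line parses on every supported version. A sketch of the difference with a hypothetical "meta" dict:

    meta = {"compressed": True}

    # Parses everywhere: the subscript uses the opposite quote style.
    suffix = f"homeassistant.tar{'.gz' if meta['compressed'] else ''}"
    assert suffix == "homeassistant.tar.gz"

    # The pre-commit spelling reused the outer double quotes inside the braces
    # and is a SyntaxError before Python 3.12 (PEP 701 added quote reuse):
    # suffix = f"homeassistant.tar{'.gz' if meta["compressed"] else ''}"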
@@ -691,10 +691,7 @@ class ConfigEntry(Generic[_DataT]):
         self._tries += 1
         ready_message = f"ready yet: {message}" if message else "ready yet"
         _LOGGER.debug(
-            (
-                "Config entry '%s' for %s integration not %s; Retrying in %d"
-                " seconds"
-            ),
+            "Config entry '%s' for %s integration not %s; Retrying in %d seconds",
             self.title,
             self.domain,
             ready_message,
@@ -1153,8 +1153,7 @@ class HomeAssistant:
             await self.async_block_till_done()
         except TimeoutError:
             _LOGGER.warning(
-                "Timed out waiting for integrations to stop, the shutdown will"
-                " continue"
+                "Timed out waiting for integrations to stop, the shutdown will continue"
             )
             self._async_log_running_tasks("stop integrations")

@@ -174,7 +174,7 @@ class ConditionErrorIndex(ConditionError):
         """Yield an indented representation."""
         if self.total > 1:
             yield self._indent(
-                indent, f"In '{self.type}' (item {self.index+1} of {self.total}):"
+                indent, f"In '{self.type}' (item {self.index + 1} of {self.total}):"
             )
         else:
             yield self._indent(indent, f"In '{self.type}':")
@@ -154,7 +154,7 @@ def _format_err[*_Ts](

     return (
         # Functions wrapped in partial do not have a __name__
-        f"Exception in {getattr(target, "__name__", None) or target} "
+        f"Exception in {getattr(target, '__name__', None) or target} "
         f"when dispatching '{signal}': {args}"
     )

@@ -666,7 +666,7 @@ def _validate_item(
     # In HA Core 2025.10, we should fail if unique_id is not a string
     report_issue = async_suggest_report_issue(hass, integration_domain=platform)
     _LOGGER.error(
-        ("'%s' from integration %s has a non string unique_id" " '%s', please %s"),
+        "'%s' from integration %s has a non string unique_id '%s', please %s",
         domain,
         platform,
         unique_id,
@@ -799,7 +799,7 @@ class EntityRegistry(BaseRegistry):
             tries += 1
             len_suffix = len(str(tries)) + 1
             test_string = (
-                f"{preferred_string[:MAX_LENGTH_STATE_ENTITY_ID-len_suffix]}_{tries}"
+                f"{preferred_string[: MAX_LENGTH_STATE_ENTITY_ID - len_suffix]}_{tries}"
             )

         return test_string
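For context on the slice being respaced: the candidate entity_id must stay within MAX_LENGTH_STATE_ENTITY_ID even after the "_{tries}" suffix is appended, so the preferred string is cut back by one character for the underscore plus the digits of the counter. A standalone sketch of that arithmetic (the constant's value, 255, matches homeassistant.const; the helper name is made up):

    MAX_LENGTH_STATE_ENTITY_ID = 255  # value as in homeassistant.const

    def truncated_candidate(preferred_string: str, tries: int) -> str:
        # Reserve room for "_" plus the decimal digits of the counter.
        len_suffix = len(str(tries)) + 1
        return f"{preferred_string[: MAX_LENGTH_STATE_ENTITY_ID - len_suffix]}_{tries}"

    candidate = truncated_candidate("x" * 300, 12)
    assert len(candidate) == MAX_LENGTH_STATE_ENTITY_ID
    assert candidate.endswith("_12")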
@@ -1770,7 +1770,7 @@ class Script:
                 f"{self.domain}.{self.name} which is already running "
                 "in the current execution path; "
                 "Traceback (most recent call last):\n"
-                f"{"\n".join(formatted_stack)}",
+                f"{'\n'.join(formatted_stack)}",
                 level=logging.WARNING,
             )
             return None
@@ -1834,7 +1834,7 @@ class Script:

     def _prep_repeat_script(self, step: int) -> Script:
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"Repeat at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"Repeat at step {step + 1}")
         sub_script = Script(
             self._hass,
             action[CONF_REPEAT][CONF_SEQUENCE],
@@ -1857,7 +1857,7 @@ class Script:

     async def _async_prep_choose_data(self, step: int) -> _ChooseData:
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"Choose at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"Choose at step {step + 1}")
         choices = []
         for idx, choice in enumerate(action[CONF_CHOOSE], start=1):
             conditions = [
@@ -1911,7 +1911,7 @@ class Script:
     async def _async_prep_if_data(self, step: int) -> _IfData:
         """Prepare data for an if statement."""
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"If at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"If at step {step + 1}")

         conditions = [
             await self._async_get_condition(config) for config in action[CONF_IF]
@@ -1962,7 +1962,7 @@ class Script:

     async def _async_prep_parallel_scripts(self, step: int) -> list[Script]:
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"Parallel action at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"Parallel action at step {step + 1}")
         parallel_scripts: list[Script] = []
         for idx, parallel_script in enumerate(action[CONF_PARALLEL], start=1):
             parallel_name = parallel_script.get(CONF_ALIAS, f"parallel {idx}")
@@ -1994,7 +1994,7 @@ class Script:
     async def _async_prep_sequence_script(self, step: int) -> Script:
         """Prepare a sequence script."""
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"Sequence action at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"Sequence action at step {step + 1}")

         sequence_script = Script(
             self._hass,
@@ -133,8 +133,7 @@ def _validate_option_or_feature(option_or_feature: str, label: str) -> Any:
         domain, enum, option = option_or_feature.split(".", 2)
     except ValueError as exc:
         raise vol.Invalid(
-            f"Invalid {label} '{option_or_feature}', expected "
-            "<domain>.<enum>.<member>"
+            f"Invalid {label} '{option_or_feature}', expected <domain>.<enum>.<member>"
         ) from exc

     base_components = _base_components()
@@ -1765,8 +1765,7 @@ def async_suggest_report_issue(
         if not integration_domain:
             return "report it to the custom integration author"
         return (
-            f"report it to the author of the '{integration_domain}' "
-            "custom integration"
+            f"report it to the author of the '{integration_domain}' custom integration"
         )

     return f"create a bug report at {issue_tracker}"
@@ -94,7 +94,8 @@ def raise_for_blocking_call(

     if found_frame is None:
         raise RuntimeError(  # noqa: TRY200
-            f"Caught blocking call to {func.__name__} with args {mapped_args.get("args")} "
+            f"Caught blocking call to {func.__name__} "
+            f"with args {mapped_args.get('args')} "
             f"in {offender_filename}, line {offender_lineno}: {offender_line} "
             "inside the event loop; "
             "This is causing stability issues. "
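Here the formatter goes the other way and splits one overlong f-string into two adjacent f-strings; adjacent f-strings concatenate exactly like plain literals, so the rendered message is unchanged. A quick check with stand-in values ("func_name" and "call_args" are made up):

    func_name = "sleep"
    call_args = (1,)
    split = f"Caught blocking call to {func_name} " f"with args {call_args} "
    merged = f"Caught blocking call to {func_name} with args {call_args} "
    assert split == merged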
@@ -231,8 +231,7 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if integrations_path.read_text() != content + "\n":
         config.add_error(
             "config_flow",
-            "File integrations.json is not up to date. "
-            "Run python3 -m script.hassfest",
+            "File integrations.json is not up to date. Run python3 -m script.hassfest",
             fixable=True,
         )

@@ -55,8 +55,7 @@ def validate(
     config_flow = ast_parse_module(config_flow_file)
     if not (_has_discovery_function(config_flow)):
         return [
-            f"Integration is missing one of {CONFIG_FLOW_STEPS} "
-            f"in {config_flow_file}"
+            f"Integration is missing one of {CONFIG_FLOW_STEPS} in {config_flow_file}"
         ]

     return None
@@ -510,8 +510,8 @@ def validate_translation_file(  # noqa: C901
     ):
         integration.add_error(
             "translations",
-            "Don't specify title in translation strings if it's a brand "
-            "name or add exception to ALLOW_NAME_TRANSLATION",
+            "Don't specify title in translation strings if it's "
+            "a brand name or add exception to ALLOW_NAME_TRANSLATION",
         )

     if config.specific_integrations:
@@ -532,12 +532,15 @@ def validate_translation_file(  # noqa: C901
             if parts or key not in search:
                 integration.add_error(
                     "translations",
-                    f"{reference['source']} contains invalid reference {reference['ref']}: Could not find {key}",
+                    f"{reference['source']} contains invalid reference "
+                    f"{reference['ref']}: Could not find {key}",
                 )
             elif match := re.match(RE_REFERENCE, search[key]):
                 integration.add_error(
                     "translations",
-                    f"Lokalise supports only one level of references: \"{reference['source']}\" should point to directly to \"{match.groups()[0]}\"",
+                    "Lokalise supports only one level of references: "
+                    f'"{reference["source"]}" should point to directly '
+                    f'to "{match.groups()[0]}"',
                 )


@@ -93,7 +93,7 @@ def gather_new_integration(determine_auth: bool) -> Info:
         "prompt": (
             f"""How will your integration gather data?

-Valid values are {', '.join(SUPPORTED_IOT_CLASSES)}
+Valid values are {", ".join(SUPPORTED_IOT_CLASSES)}

 More info @ https://developers.home-assistant.io/docs/creating_integration_manifest#iot-class
 """
@@ -79,7 +79,7 @@ class BucketHolder:
         """Create output file."""
         with Path("pytest_buckets.txt").open("w") as file:
             for idx, bucket in enumerate(self._buckets):
-                print(f"Bucket {idx+1} has {bucket.total_tests} tests")
+                print(f"Bucket {idx + 1} has {bucket.total_tests} tests")
                 file.write(bucket.get_paths_line())


@@ -777,7 +777,7 @@ async def _check_config_flow_result_translations(
             translation_errors,
             category,
             integration,
-            f"{key_prefix}abort.{result["reason"]}",
+            f"{key_prefix}abort.{result['reason']}",
             result["description_placeholders"],
         )

@@ -580,7 +580,7 @@ async def hass(
             exceptions.append(
                 Exception(
                     "Received exception handler without exception, "
-                    f"but with message: {context["message"]}"
+                    f"but with message: {context['message']}"
                 )
             )
         orig_exception_handler(loop, context)
@@ -74,7 +74,7 @@ def test_load_translations_files_by_language(
                 "name": "Other 4",
                 "unit_of_measurement": "quantities",
             },
-            "outlet": {"name": "Outlet " "{placeholder}"},
+            "outlet": {"name": "Outlet {placeholder}"},
         }
     },
     "something": "else",
@@ -376,7 +376,7 @@ def override_syrupy_finish(self: SnapshotSession) -> int:
     with open(".pytest_syrupy_worker_count", "w", encoding="utf-8") as f:
         f.write(os.getenv("PYTEST_XDIST_WORKER_COUNT"))
     with open(
-        f".pytest_syrupy_{os.getenv("PYTEST_XDIST_WORKER")}_result",
+        f".pytest_syrupy_{os.getenv('PYTEST_XDIST_WORKER')}_result",
         "w",
         encoding="utf-8",
     ) as f:
@@ -7266,9 +7266,9 @@ async def test_unique_id_collision_issues(
     mock_setup_entry = AsyncMock(return_value=True)
     for i in range(3):
         mock_integration(
-            hass, MockModule(f"test{i+1}", async_setup_entry=mock_setup_entry)
+            hass, MockModule(f"test{i + 1}", async_setup_entry=mock_setup_entry)
         )
-        mock_platform(hass, f"test{i+1}.config_flow", None)
+        mock_platform(hass, f"test{i + 1}.config_flow", None)

     test2_group_1: list[MockConfigEntry] = []
     test2_group_2: list[MockConfigEntry] = []