Improve formatting in core files (#135256)
* Adjust core files formatting
* Adjust translations script
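Taken together, the hunks below apply three mechanical rewrites, consistent with what Ruff's stabilized f-string formatting produces (that attribution is my reading of the diff, not something the commit message states). A minimal Python sketch of all three patterns, with made-up values:

# 1. Quotes inside f-string replacement fields are normalized to single
#    quotes, so the code no longer leans on PEP 701 quote reuse:
#    f"{meta["compressed"]}"  ->  f"{meta['compressed']}"
meta = {"compressed": True}
suffix = f"homeassistant.tar{'.gz' if meta['compressed'] else ''}"

# 2. Binary operators inside replacement fields gain surrounding spaces:
#    f"step {step+1}"  ->  f"step {step + 1}"
step = 4
name = f"Repeat at step {step + 1}"

# 3. Implicitly concatenated string literals are joined into one literal
#    whenever the result fits the line-length limit:
#    "...will" " continue"  ->  "...will continue"
msg = "Timed out waiting for integrations to stop, the shutdown will continue"

print(suffix, name, msg)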
parent 823feae0f9
commit 5df7092f41
@@ -119,7 +119,7 @@ def _extract_backup(
         Path(
             tempdir,
             "extracted",
-            f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
+            f"homeassistant.tar{'.gz' if backup_meta['compressed'] else ''}",
         ),
         gzip=backup_meta["compressed"],
         key=password_to_key(restore_content.password)
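Worth noting about this first hunk (an observation, not something the commit states): the removed line nests double quotes inside a double-quoted f-string, which only parses on Python 3.12+ via PEP 701; the single-quoted replacement parses everywhere. A quick sketch:

meta = {"compressed": False}

# Valid on any modern Python:
name = f"homeassistant.tar{'.gz' if meta['compressed'] else ''}"
print(name)  # homeassistant.tar

# The pre-change spelling, f"...{meta["compressed"]}...", is a SyntaxError
# before Python 3.12, so normalizing the inner quotes also keeps the code
# readable by older tooling, beyond mere style consistency.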
@@ -691,10 +691,7 @@ class ConfigEntry(Generic[_DataT]):
         self._tries += 1
         ready_message = f"ready yet: {message}" if message else "ready yet"
         _LOGGER.debug(
-            (
-                "Config entry '%s' for %s integration not %s; Retrying in %d"
-                " seconds"
-            ),
+            "Config entry '%s' for %s integration not %s; Retrying in %d seconds",
            self.title,
            self.domain,
            ready_message,
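The parentheses in the removed lines existed only to hold the implicitly concatenated literal; once the pieces are joined into one literal that fits the line limit, both the split and the wrapper go away. A self-contained before/after sketch (the logger setup is illustrative, not from this file):

import logging

logging.basicConfig(level=logging.DEBUG)
_LOGGER = logging.getLogger(__name__)

# Before: parenthesized implicit concatenation across two lines.
_LOGGER.debug(
    ("Config entry '%s' for %s integration not %s; Retrying in %d" " seconds"),
    "Sun", "sun", "ready yet", 5,
)

# After: one literal, identical runtime string, two lines shorter.
_LOGGER.debug(
    "Config entry '%s' for %s integration not %s; Retrying in %d seconds",
    "Sun", "sun", "ready yet", 5,
)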
@@ -1153,8 +1153,7 @@ class HomeAssistant:
             await self.async_block_till_done()
         except TimeoutError:
             _LOGGER.warning(
-                "Timed out waiting for integrations to stop, the shutdown will"
-                " continue"
+                "Timed out waiting for integrations to stop, the shutdown will continue"
             )
         self._async_log_running_tasks("stop integrations")
@@ -174,7 +174,7 @@ class ConditionErrorIndex(ConditionError):
         """Yield an indented representation."""
         if self.total > 1:
             yield self._indent(
-                indent, f"In '{self.type}' (item {self.index+1} of {self.total}):"
+                indent, f"In '{self.type}' (item {self.index + 1} of {self.total}):"
             )
         else:
             yield self._indent(indent, f"In '{self.type}':")
@@ -154,7 +154,7 @@ def _format_err[*_Ts](

     return (
         # Functions wrapped in partial do not have a __name__
-        f"Exception in {getattr(target, "__name__", None) or target} "
+        f"Exception in {getattr(target, '__name__', None) or target} "
         f"when dispatching '{signal}': {args}"
     )

@@ -666,7 +666,7 @@ def _validate_item(
     # In HA Core 2025.10, we should fail if unique_id is not a string
     report_issue = async_suggest_report_issue(hass, integration_domain=platform)
     _LOGGER.error(
-        ("'%s' from integration %s has a non string unique_id" " '%s', please %s"),
+        "'%s' from integration %s has a non string unique_id '%s', please %s",
        domain,
        platform,
        unique_id,
@@ -799,7 +799,7 @@ class EntityRegistry(BaseRegistry):
             tries += 1
             len_suffix = len(str(tries)) + 1
             test_string = (
-                f"{preferred_string[:MAX_LENGTH_STATE_ENTITY_ID-len_suffix]}_{tries}"
+                f"{preferred_string[: MAX_LENGTH_STATE_ENTITY_ID - len_suffix]}_{tries}"
             )

         return test_string
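The entity-registry hunk is the subtlest of the spacing changes: inside the replacement field, the slice bound MAX_LENGTH_STATE_ENTITY_ID - len_suffix is now formatted as an ordinary expression, with the operator and the slice colon spaced. Behavior is identical; a sketch with stand-in values (the constant name is real in Home Assistant, its value here is assumed):

MAX_LENGTH_STATE_ENTITY_ID = 255  # assumed value for this sketch
preferred_string = "sensor.some_very_long_preferred_entity_id"
tries = 2
len_suffix = len(str(tries)) + 1

old_style = f"{preferred_string[:MAX_LENGTH_STATE_ENTITY_ID-len_suffix]}_{tries}"
new_style = f"{preferred_string[: MAX_LENGTH_STATE_ENTITY_ID - len_suffix]}_{tries}"
assert old_style == new_style  # only the source formatting differs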
@@ -1770,7 +1770,7 @@ class Script:
                 f"{self.domain}.{self.name} which is already running "
                 "in the current execution path; "
                 "Traceback (most recent call last):\n"
-                f"{"\n".join(formatted_stack)}",
+                f"{'\n'.join(formatted_stack)}",
                 level=logging.WARNING,
             )
             return None
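Unlike the quote-only changes, this hunk's expression contains a backslash ('\n' inside the replacement field), which requires Python 3.12+ both before and after the change; as I read it, the rewrite here is purely quote-style normalization, since Home Assistant already targets 3.12. A sketch:

formatted_stack = ["  File 'a.yaml', line 1", "  File 'a.yaml', line 2"]

# Requires Python 3.12+ either way (backslash in the expression part);
# only the inner quote characters changed.
trace = f"Traceback (most recent call last):\n{'\n'.join(formatted_stack)}"
print(trace)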
@@ -1834,7 +1834,7 @@ class Script:

     def _prep_repeat_script(self, step: int) -> Script:
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"Repeat at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"Repeat at step {step + 1}")
         sub_script = Script(
             self._hass,
             action[CONF_REPEAT][CONF_SEQUENCE],
@@ -1857,7 +1857,7 @@ class Script:

     async def _async_prep_choose_data(self, step: int) -> _ChooseData:
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"Choose at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"Choose at step {step + 1}")
         choices = []
         for idx, choice in enumerate(action[CONF_CHOOSE], start=1):
             conditions = [
@@ -1911,7 +1911,7 @@ class Script:
     async def _async_prep_if_data(self, step: int) -> _IfData:
         """Prepare data for an if statement."""
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"If at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"If at step {step + 1}")

         conditions = [
             await self._async_get_condition(config) for config in action[CONF_IF]
@@ -1962,7 +1962,7 @@ class Script:

     async def _async_prep_parallel_scripts(self, step: int) -> list[Script]:
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"Parallel action at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"Parallel action at step {step + 1}")
         parallel_scripts: list[Script] = []
         for idx, parallel_script in enumerate(action[CONF_PARALLEL], start=1):
             parallel_name = parallel_script.get(CONF_ALIAS, f"parallel {idx}")
@@ -1994,7 +1994,7 @@ class Script:
     async def _async_prep_sequence_script(self, step: int) -> Script:
         """Prepare a sequence script."""
         action = self.sequence[step]
-        step_name = action.get(CONF_ALIAS, f"Sequence action at step {step+1}")
+        step_name = action.get(CONF_ALIAS, f"Sequence action at step {step + 1}")

         sequence_script = Script(
             self._hass,
@@ -133,8 +133,7 @@ def _validate_option_or_feature(option_or_feature: str, label: str) -> Any:
         domain, enum, option = option_or_feature.split(".", 2)
     except ValueError as exc:
         raise vol.Invalid(
-            f"Invalid {label} '{option_or_feature}', expected "
-            "<domain>.<enum>.<member>"
+            f"Invalid {label} '{option_or_feature}', expected <domain>.<enum>.<member>"
         ) from exc

     base_components = _base_components()
@@ -1765,8 +1765,7 @@ def async_suggest_report_issue(
         if not integration_domain:
             return "report it to the custom integration author"
         return (
-            f"report it to the author of the '{integration_domain}' "
-            "custom integration"
+            f"report it to the author of the '{integration_domain}' custom integration"
         )

     return f"create a bug report at {issue_tracker}"
@@ -94,7 +94,8 @@ def raise_for_blocking_call(

     if found_frame is None:
         raise RuntimeError(  # noqa: TRY200
-            f"Caught blocking call to {func.__name__} with args {mapped_args.get("args")} "
+            f"Caught blocking call to {func.__name__} "
+            f"with args {mapped_args.get('args')} "
             f"in {offender_filename}, line {offender_lineno}: {offender_line} "
             "inside the event loop; "
             "This is causing stability issues. "
@@ -231,8 +231,7 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if integrations_path.read_text() != content + "\n":
         config.add_error(
             "config_flow",
-            "File integrations.json is not up to date. "
-            "Run python3 -m script.hassfest",
+            "File integrations.json is not up to date. Run python3 -m script.hassfest",
             fixable=True,
         )

@@ -55,8 +55,7 @@ def validate(
     config_flow = ast_parse_module(config_flow_file)
     if not (_has_discovery_function(config_flow)):
         return [
-            f"Integration is missing one of {CONFIG_FLOW_STEPS} "
-            f"in {config_flow_file}"
+            f"Integration is missing one of {CONFIG_FLOW_STEPS} in {config_flow_file}"
         ]

     return None
@@ -510,8 +510,8 @@ def validate_translation_file(  # noqa: C901
     ):
         integration.add_error(
             "translations",
-            "Don't specify title in translation strings if it's a brand "
-            "name or add exception to ALLOW_NAME_TRANSLATION",
+            "Don't specify title in translation strings if it's "
+            "a brand name or add exception to ALLOW_NAME_TRANSLATION",
         )

     if config.specific_integrations:
@@ -532,12 +532,15 @@ def validate_translation_file(  # noqa: C901
         if parts or key not in search:
             integration.add_error(
                 "translations",
-                f"{reference['source']} contains invalid reference {reference['ref']}: Could not find {key}",
+                f"{reference['source']} contains invalid reference"
+                f"{reference['ref']}: Could not find {key}",
             )
         elif match := re.match(RE_REFERENCE, search[key]):
             integration.add_error(
                 "translations",
-                f"Lokalise supports only one level of references: \"{reference['source']}\" should point to directly to \"{match.groups()[0]}\"",
+                "Lokalise supports only one level of references: "
+                f'"{reference["source"]}" should point to directly '
+                f'to "{match.groups()[0]}"',
             )

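This hunk runs opposite to the rest: instead of joining literals, over-long messages are split into implicitly concatenated pieces to satisfy the line limit. One detail worth seeing concretely: as extracted here, the first split shows no space before the second literal (possibly an extraction artifact), in which case the concatenated message would butt the ref against the word "reference". A sketch with made-up values:

reference = {"source": "sensor.foo", "ref": "common::bar"}
key = "title"

msg = (
    f"{reference['source']} contains invalid reference"
    f"{reference['ref']}: Could not find {key}"
)
print(msg)  # sensor.foo contains invalid referencecommon::bar: Could not find title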
@@ -93,7 +93,7 @@ def gather_new_integration(determine_auth: bool) -> Info:
             "prompt": (
                 f"""How will your integration gather data?

-Valid values are {', '.join(SUPPORTED_IOT_CLASSES)}
+Valid values are {", ".join(SUPPORTED_IOT_CLASSES)}

 More info @ https://developers.home-assistant.io/docs/creating_integration_manifest#iot-class
 """
@@ -79,7 +79,7 @@ class BucketHolder:
         """Create output file."""
         with Path("pytest_buckets.txt").open("w") as file:
             for idx, bucket in enumerate(self._buckets):
-                print(f"Bucket {idx+1} has {bucket.total_tests} tests")
+                print(f"Bucket {idx + 1} has {bucket.total_tests} tests")
                 file.write(bucket.get_paths_line())

@@ -777,7 +777,7 @@ async def _check_config_flow_result_translations(
             translation_errors,
             category,
             integration,
-            f"{key_prefix}abort.{result["reason"]}",
+            f"{key_prefix}abort.{result['reason']}",
             result["description_placeholders"],
         )

@@ -580,7 +580,7 @@ async def hass(
             exceptions.append(
                 Exception(
                     "Received exception handler without exception, "
-                    f"but with message: {context["message"]}"
+                    f"but with message: {context['message']}"
                 )
             )
         orig_exception_handler(loop, context)
@@ -74,7 +74,7 @@ def test_load_translations_files_by_language(
                 "name": "Other 4",
                 "unit_of_measurement": "quantities",
             },
-            "outlet": {"name": "Outlet " "{placeholder}"},
+            "outlet": {"name": "Outlet {placeholder}"},
         }
     },
     "something": "else",
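The "Outlet " "{placeholder}" pair looks like the leftover of an earlier reflow; adjacent string literals are concatenated by the parser at compile time, so merging them is a pure no-op, as a tiny check shows:

# Adjacent literals concatenate at parse time: the two dicts are identical.
before = {"outlet": {"name": "Outlet " "{placeholder}"}}
after = {"outlet": {"name": "Outlet {placeholder}"}}
assert before == after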
@@ -376,7 +376,7 @@ def override_syrupy_finish(self: SnapshotSession) -> int:
     with open(".pytest_syrupy_worker_count", "w", encoding="utf-8") as f:
         f.write(os.getenv("PYTEST_XDIST_WORKER_COUNT"))
     with open(
-        f".pytest_syrupy_{os.getenv("PYTEST_XDIST_WORKER")}_result",
+        f".pytest_syrupy_{os.getenv('PYTEST_XDIST_WORKER')}_result",
         "w",
         encoding="utf-8",
     ) as f:
@@ -7266,9 +7266,9 @@ async def test_unique_id_collision_issues(
     mock_setup_entry = AsyncMock(return_value=True)
     for i in range(3):
         mock_integration(
-            hass, MockModule(f"test{i+1}", async_setup_entry=mock_setup_entry)
+            hass, MockModule(f"test{i + 1}", async_setup_entry=mock_setup_entry)
         )
-        mock_platform(hass, f"test{i+1}.config_flow", None)
+        mock_platform(hass, f"test{i + 1}.config_flow", None)

     test2_group_1: list[MockConfigEntry] = []
     test2_group_2: list[MockConfigEntry] = []