From 4f20776e0e130f865fdd5ac351fb40275103b173 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 15 Aug 2025 15:44:47 +0200 Subject: [PATCH] Add check for dependency package names in hassfest (#150630) --- script/hassfest/requirements.py | 128 +++++++++++++++++++++++++++- tests/hassfest/test_requirements.py | 118 +++++++++++++++++++++++++ 2 files changed, 245 insertions(+), 1 deletion(-) diff --git a/script/hassfest/requirements.py b/script/hassfest/requirements.py index 9b5334823b9..3f9cc5a0f8a 100644 --- a/script/hassfest/requirements.py +++ b/script/hassfest/requirements.py @@ -3,8 +3,9 @@ from __future__ import annotations from collections import deque +from collections.abc import Collection from functools import cache -from importlib.metadata import metadata +from importlib.metadata import files, metadata import json import os import re @@ -300,6 +301,64 @@ FORBIDDEN_PACKAGE_EXCEPTIONS: dict[str, dict[str, set[str]]] = { }, } +FORBIDDEN_PACKAGE_NAMES: set[str] = { + "doc", + "docs", + "test", + "tests", +} +FORBIDDEN_PACKAGE_FILES_EXCEPTIONS = { + # In the form dict("domain": {"package": {"reason1", "reason2"}}) + # - domain is the integration domain + # - package is the package (can be transitive) referencing the dependency + # - reasonX should be the name of the invalid dependency + # https://github.com/jaraco/jaraco.net + "abode": {"jaraco-abode": {"jaraco-net"}}, + # https://github.com/coinbase/coinbase-advanced-py + "coinbase": {"homeassistant": {"coinbase-advanced-py"}}, + # https://github.com/ggrammar/pizzapi + "dominos": {"homeassistant": {"pizzapi"}}, + # https://github.com/u9n/dlms-cosem + "dsmr": {"dsmr-parser": {"dlms-cosem"}}, + # https://github.com/ChrisMandich/PyFlume # Fixed with >=0.7.1 + "flume": {"homeassistant": {"pyflume"}}, + # https://github.com/fortinet-solutions-cse/fortiosapi + "fortios": {"homeassistant": {"fortiosapi"}}, + # https://github.com/manzanotti/geniushub-client + "geniushub": {"homeassistant": {"geniushub-client"}}, + # https://github.com/basnijholt/aiokef + "kef": {"homeassistant": {"aiokef"}}, + # https://github.com/danifus/pyzipper + "knx": {"xknxproject": {"pyzipper"}}, + # https://github.com/hthiery/python-lacrosse + "lacrosse": {"homeassistant": {"pylacrosse"}}, + # ??? 
+ "linode": {"homeassistant": {"linode-api"}}, + # https://github.com/timmo001/aiolyric + "lyric": {"homeassistant": {"aiolyric"}}, + # https://github.com/microBeesTech/pythonSDK/ + "microbees": {"homeassistant": {"microbeespy"}}, + # https://github.com/tiagocoutinho/async_modbus + "nibe_heatpump": {"nibe": {"async-modbus"}}, + # https://github.com/ejpenney/pyobihai + "obihai": {"homeassistant": {"pyobihai"}}, + # https://github.com/iamkubi/pydactyl + "pterodactyl": {"homeassistant": {"py-dactyl"}}, + # https://github.com/markusressel/raspyrfm-client + "raspyrfm": {"homeassistant": {"raspyrfm-client"}}, + # https://github.com/sstallion/sensorpush-api + "sensorpush_cloud": { + "homeassistant": {"sensorpush-api"}, + "sensorpush-ha": {"sensorpush-api"}, + }, + # https://github.com/smappee/pysmappee + "smappee": {"homeassistant": {"pysmappee"}}, + # https://github.com/watergate-ai/watergate-local-api-python + "watergate": {"homeassistant": {"watergate-local-api"}}, + # https://github.com/markusressel/xs1-api-client + "xs1": {"homeassistant": {"xs1-api-client"}}, +} + PYTHON_VERSION_CHECK_EXCEPTIONS: dict[str, dict[str, set[str]]] = { # In the form dict("domain": {"package": {"dependency1", "dependency2"}}) # - domain is the integration domain @@ -311,6 +370,8 @@ PYTHON_VERSION_CHECK_EXCEPTIONS: dict[str, dict[str, set[str]]] = { }, } +_packages_checked_files_cache: dict[str, set[str]] = {} + def validate(integrations: dict[str, Integration], config: Config) -> None: """Handle requirements for integrations.""" @@ -476,6 +537,12 @@ def get_requirements(integration: Integration, packages: set[str]) -> set[str]: ) needs_forbidden_package_exceptions = False + packages_checked_files: set[str] = set() + forbidden_package_files_exceptions = FORBIDDEN_PACKAGE_FILES_EXCEPTIONS.get( + integration.domain, {} + ) + needs_forbidden_package_files_exception = False + package_version_check_exceptions = PACKAGE_CHECK_VERSION_RANGE_EXCEPTIONS.get( integration.domain, {} ) @@ -517,6 +584,17 @@ def get_requirements(integration: Integration, packages: set[str]) -> set[str]: f"({requires_python}) in {package}", ) + # Check package names + if package not in packages_checked_files: + packages_checked_files.add(package) + if not check_dependency_files( + integration, + "homeassistant", + package, + forbidden_package_files_exceptions.get("homeassistant", ()), + ): + needs_forbidden_package_files_exception = True + # Use inner loop to check dependencies # so we have access to the dependency parent (=current package) dependencies: dict[str, str] = item["dependencies"] @@ -540,6 +618,17 @@ def get_requirements(integration: Integration, packages: set[str]) -> set[str]: ): needs_package_version_check_exception = True + # Check package names + if pkg not in packages_checked_files: + packages_checked_files.add(pkg) + if not check_dependency_files( + integration, + package, + pkg, + forbidden_package_files_exceptions.get(package, ()), + ): + needs_forbidden_package_files_exception = True + to_check.extend(dependencies) if forbidden_package_exceptions and not needs_forbidden_package_exceptions: @@ -560,6 +649,15 @@ def get_requirements(integration: Integration, packages: set[str]) -> set[str]: f"Integration {integration.domain} version restrictions for Python have " "been resolved, please remove from `PYTHON_VERSION_CHECK_EXCEPTIONS`", ) + if ( + forbidden_package_files_exceptions + and not needs_forbidden_package_files_exception + ): + integration.add_error( + "requirements", + f"Integration {integration.domain} runtime files 
dependency exceptions "
+            "have been resolved, please remove from `FORBIDDEN_PACKAGE_FILES_EXCEPTIONS`",
+        )
 
     return all_requirements
 
@@ -635,6 +733,34 @@ def _is_dependency_version_range_valid(
     return False
 
 
+def check_dependency_files(
+    integration: Integration,
+    package: str,
+    pkg: str,
+    package_exceptions: Collection[str],
+) -> bool:
+    """Check dependency files for forbidden package names."""
+    if (results := _packages_checked_files_cache.get(pkg)) is None:
+        top_level: set[str] = set()
+        for file in files(pkg) or ():
+            top = file.parts[0].lower()
+            if top.endswith((".dist-info", ".py")):
+                continue
+            top_level.add(top)
+        results = FORBIDDEN_PACKAGE_NAMES & top_level
+        _packages_checked_files_cache[pkg] = results
+    if not results:
+        return True
+
+    for dir_name in results:
+        integration.add_warning_or_error(
+            pkg in package_exceptions,
+            "requirements",
+            f"Package {pkg} has a forbidden top level directory {dir_name} in {package}",
+        )
+    return False
+
+
 def install_requirements(integration: Integration, requirements: set[str]) -> bool:
     """Install integration requirements.
 
diff --git a/tests/hassfest/test_requirements.py b/tests/hassfest/test_requirements.py
index dcd35a3aca7..944b06d3c90 100644
--- a/tests/hassfest/test_requirements.py
+++ b/tests/hassfest/test_requirements.py
@@ -1,5 +1,7 @@
 """Tests for hassfest requirements."""
 
+from collections.abc import Generator
+from importlib.metadata import PackagePath
 from pathlib import Path
 from unittest.mock import patch
 
@@ -7,8 +9,11 @@ import pytest
 
 from script.hassfest.model import Config, Integration
 from script.hassfest.requirements import (
+    FORBIDDEN_PACKAGE_NAMES,
     PACKAGE_CHECK_PREPARE_UPDATE,
     PACKAGE_CHECK_VERSION_RANGE,
+    _packages_checked_files_cache,
+    check_dependency_files,
     check_dependency_version_range,
     validate_requirements_format,
 )
@@ -35,6 +40,19 @@ def integration():
     )
 
 
+@pytest.fixture
+def mock_forbidden_package_names() -> Generator[None]:
+    """Fixture to narrow FORBIDDEN_PACKAGE_NAMES for these tests."""
+    # Mutate the set in place so script.hassfest.requirements sees the change.
+    original = FORBIDDEN_PACKAGE_NAMES.copy()
+    FORBIDDEN_PACKAGE_NAMES.clear()
+    FORBIDDEN_PACKAGE_NAMES.update({"test", "tests"})
+    try:
+        yield
+    finally:
+        FORBIDDEN_PACKAGE_NAMES.update(original)
+
+
 def test_validate_requirements_format_with_space(integration: Integration) -> None:
     """Test validate requirement with space around separator."""
     integration.manifest["requirements"] = ["test_package == 1"]
@@ -149,3 +167,103 @@ def test_dependency_version_range_prepare_update(
         )
         == result
     )
+
+
+@pytest.mark.usefixtures("mock_forbidden_package_names")
+def test_check_dependency_files(integration: Integration) -> None:
+    """Test that the dependency files check for forbidden package names works correctly."""
+    package = "homeassistant"
+    pkg = "my_package"
+
+    # Forbidden top level directories: test, tests
+    pkg_files = [
+        PackagePath("my_package/__init__.py"),
+        PackagePath("my_package-1.0.0.dist-info/METADATA"),
+        PackagePath("tests/test_some_function.py"),
+        PackagePath("test/submodule/test_some_other_function.py"),
+    ]
+    with (
+        patch(
+            "script.hassfest.requirements.files", return_value=pkg_files
+        ) as mock_files,
+        patch.dict(_packages_checked_files_cache, {}, clear=True),
+    ):
+        assert not _packages_checked_files_cache
+        assert check_dependency_files(integration, package, pkg, ()) is False
+        assert _packages_checked_files_cache[pkg] == {"tests", "test"}
+        assert len(integration.errors) == 2
+        assert any(
+            f"Package {pkg} has a forbidden top level directory tests in {package}"
+            in x.error
+            for x in integration.errors
+        )
+        assert any(
+            f"Package {pkg} has a forbidden top level directory test in {package}"
+            in x.error
+            for x in integration.errors
+        )
+        integration.errors.clear()
+
+        # Repeated call should use cache
+        assert check_dependency_files(integration, package, pkg, ()) is False
+        assert mock_files.call_count == 1
+        assert len(integration.errors) == 2
+        integration.errors.clear()
+
+    # Exceptions set
+    pkg_files = [
+        PackagePath("my_package/__init__.py"),
+        PackagePath("my_package.dist-info/METADATA"),
+        PackagePath("tests/test_some_function.py"),
+    ]
+    with (
+        patch(
+            "script.hassfest.requirements.files", return_value=pkg_files
+        ) as mock_files,
+        patch.dict(_packages_checked_files_cache, {}, clear=True),
+    ):
+        assert not _packages_checked_files_cache
+        assert (
+            check_dependency_files(integration, package, pkg, package_exceptions={pkg})
+            is False
+        )
+        assert _packages_checked_files_cache[pkg] == {"tests"}
+        assert len(integration.errors) == 0
+        assert len(integration.warnings) == 1
+        assert any(
+            f"Package {pkg} has a forbidden top level directory tests in {package}"
+            in x.error
+            for x in integration.warnings
+        )
+        integration.warnings.clear()
+
+        # Repeated call should use cache
+        assert (
+            check_dependency_files(integration, package, pkg, package_exceptions={pkg})
+            is False
+        )
+        assert mock_files.call_count == 1
+        assert len(integration.errors) == 0
+        assert len(integration.warnings) == 1
+        integration.warnings.clear()
+
+    # All good
+    pkg_files = [
+        PackagePath("my_package/__init__.py"),
+        PackagePath("my_package.dist-info/METADATA"),
+    ]
+    with (
+        patch(
+            "script.hassfest.requirements.files", return_value=pkg_files
+        ) as mock_files,
+        patch.dict(_packages_checked_files_cache, {}, clear=True),
+    ):
+        assert not _packages_checked_files_cache
+        assert check_dependency_files(integration, package, pkg, ()) is True
+        assert _packages_checked_files_cache[pkg] == set()
+        assert len(integration.errors) == 0
+
+        # Repeated call should use cache
+        assert check_dependency_files(integration, package, pkg, ()) is True
+        assert mock_files.call_count == 1
+        assert len(integration.errors) == 0
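
Note on the technique used by check_dependency_files(): it reads the installed distribution's file list via importlib.metadata.files(), keeps only the first path component of each entry, skips "*.dist-info" directories and bare top-level "*.py" modules, and intersects the remaining names with FORBIDDEN_PACKAGE_NAMES; results are cached per package in _packages_checked_files_cache. The standalone sketch below shows the same idea outside hassfest. It is not part of the patch, and the distribution name "aiohttp" is only an assumed example of a package installed in the environment.

from importlib.metadata import PackageNotFoundError, files

# Mirrors FORBIDDEN_PACKAGE_NAMES from the patch.
FORBIDDEN_TOP_LEVEL = {"doc", "docs", "test", "tests"}


def forbidden_top_level(dist_name: str) -> set[str]:
    """Return forbidden top-level names shipped by an installed distribution."""
    try:
        # files() returns None when the installer recorded no file list.
        dist_files = files(dist_name) or ()
    except PackageNotFoundError:
        return set()
    top_level: set[str] = set()
    for file in dist_files:
        top = file.parts[0].lower()
        # Ignore the metadata directory and single-file top-level modules.
        if top.endswith((".dist-info", ".py")):
            continue
        top_level.add(top)
    return FORBIDDEN_TOP_LEVEL & top_level


if __name__ == "__main__":
    # "aiohttp" is only an illustrative name; substitute any installed distribution.
    print(forbidden_top_level("aiohttp"))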