Compare commits

...

9 Commits

Author SHA1 Message Date
Wendelin
698412a4d3 Add async-timeout exception for frontend githubapi 2026-01-21 13:07:00 +01:00
Wendelin
20d14a6e00 Fix tests 2026-01-21 10:11:19 +01:00
Wendelin
900c2f881e fix backup exclude 2026-01-21 10:09:54 +01:00
Wendelin
2cf406f66c Update PR_CACHE_DIR to use 'frontend_development_artifacts' 2026-01-21 08:31:37 +01:00
Wendelin
7bba6f0e26 Reorganize import of download_pr_artifact for frontend setup 2026-01-21 08:20:50 +01:00
Wendelin
460fa5a052 Fix exception handling and update tests for async migration
- Use specific aiogithubapi exception types instead of checking status attribute
- Handle GitHubAuthenticationException for 401 errors
- Handle GitHubRatelimitException and GitHubPermissionException for 403 errors
- Handle GitHubNotFoundException for 404 errors
- Update tests to use aiogithubapi and aiohttp mocks
- Replace PyGithub mocks with GitHubAPI.generic() mocks
- Replace requests mocks with aioclient_mock (AiohttpClientMocker)
- Add test for GitHub API error handling

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-20 17:58:50 +01:00
Wendelin
bebc6a63f1 Migrate PR download to async libraries
Replace sync PyGithub with aiogithubapi and requests with aiohttp.
- Use aiogithubapi.GitHubAPI for GitHub API calls
- Use aiohttp_client.async_get_clientsession for HTTP downloads
- Remove executor job calls for API operations (now fully async)
- Keep executor jobs only for blocking I/O (file operations, zip extraction)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-20 17:43:47 +01:00
Wendelin
689415d021 Eliminate duplicate error messages in PR download
Move GitHub error messages to module-level constants to avoid duplication
across _get_pr_head_sha and _download_artifact_data functions.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-20 15:33:34 +01:00
Wendelin
07de504f54 Refactor error handling in frontend PR download
Use dictionary mapping for HTTP error messages to eliminate code duplication.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-20 15:23:51 +01:00
10 changed files with 664 additions and 3 deletions

View File

@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
"tmp_backups/*.tar",
"OZW_Log.txt",
"tts/*",
"frontend_development_artifacts/*",
]
EXCLUDE_DATABASE_FROM_BACKUP = [

View File

@@ -52,6 +52,10 @@ CONF_EXTRA_MODULE_URL = "extra_module_url"
CONF_EXTRA_JS_URL_ES5 = "extra_js_url_es5"
CONF_FRONTEND_REPO = "development_repo"
CONF_JS_VERSION = "javascript_version"
CONF_DEVELOPMENT_PR = "development_pr"
CONF_GITHUB_TOKEN = "github_token"
PR_CACHE_DIR = "frontend_development_artifacts"
DEFAULT_THEME_COLOR = "#2980b9"
@@ -129,7 +133,9 @@ CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
vol.Optional(CONF_FRONTEND_REPO): cv.path,
vol.Optional(CONF_DEVELOPMENT_PR): cv.positive_int,
vol.Optional(CONF_GITHUB_TOKEN): cv.string,
vol.Optional(CONF_THEMES): vol.All(dict, _validate_themes),
vol.Optional(CONF_EXTRA_MODULE_URL): vol.All(
cv.ensure_list, [cv.string]
@@ -394,7 +400,17 @@ def add_manifest_json_key(key: str, val: Any) -> None:
def _frontend_root(dev_repo_path: str | None) -> pathlib.Path:
"""Return root path to the frontend files."""
if dev_repo_path is not None:
return pathlib.Path(dev_repo_path) / "hass_frontend"
dev_frontend_path = pathlib.Path(dev_repo_path) / "hass_frontend"
if dev_frontend_path.exists() and dev_frontend_path.is_dir():
_LOGGER.info("Using frontend development repo: %s", dev_repo_path)
return dev_frontend_path
_LOGGER.error(
"Frontend development repo path does not exist: %s, "
"falling back to the integrated frontend",
dev_repo_path,
)
# Keep import here so that we can import frontend without installing reqs
import hass_frontend # noqa: PLC0415
@@ -421,7 +437,43 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
key,
)
# Handle development configuration with priority
repo_path = conf.get(CONF_FRONTEND_REPO)
dev_pr_number = conf.get(CONF_DEVELOPMENT_PR)
# Priority: development_repo > development_pr > integrated
if repo_path and dev_pr_number:
_LOGGER.warning(
"Both development_repo and development_pr are configured. "
"Using development_repo (takes precedence). "
"Remove development_repo to use automatic PR download"
)
dev_pr_number = None # Disable PR download
if dev_pr_number:
pr_cache_dir = pathlib.Path(hass.config.config_dir) / PR_CACHE_DIR
github_token = conf.get(CONF_GITHUB_TOKEN)
# Keep import here so that we can import frontend without installing reqs
from .pr_download import download_pr_artifact # noqa: PLC0415
# Download PR artifact
dev_pr_dir = await download_pr_artifact(
hass, dev_pr_number, github_token, pr_cache_dir
)
if dev_pr_dir is None:
_LOGGER.error(
"Failed to download PR #%s, falling back to the integrated frontend",
dev_pr_number,
)
repo_path = None
else:
# frontend_dir is .../frontend_development_artifacts/<pr_number>/hass_frontend
# We need to pass .../frontend_development_artifacts/<pr_number> to _frontend_root
repo_path = str(dev_pr_dir.parent)
_LOGGER.info("Using frontend from PR #%s", dev_pr_number)
is_dev = repo_path is not None
root_path = _frontend_root(repo_path)

View File

@@ -23,5 +23,8 @@
"winter_mode": {}
},
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20260107.2"]
"requirements": [
"home-assistant-frontend==20260107.2",
"aiogithubapi==24.6.0"
]
}

View File

@@ -0,0 +1,259 @@
"""GitHub PR artifact download functionality for frontend development."""
from __future__ import annotations
import io
import logging
import pathlib
import shutil
import zipfile
from aiogithubapi import (
GitHubAPI,
GitHubAuthenticationException,
GitHubException,
GitHubNotFoundException,
GitHubPermissionException,
GitHubRatelimitException,
)
from aiohttp import ClientError, ClientResponseError, ClientTimeout
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
_LOGGER = logging.getLogger(__name__)
GITHUB_REPO = "home-assistant/frontend"
ARTIFACT_NAME = "frontend-build"
CACHE_WARNING_SIZE_MB = 500
# Error messages
ERROR_INVALID_TOKEN = (
"GitHub token is invalid or expired. "
"Please check your github_token in the frontend configuration. "
"Generate a new token at https://github.com/settings/tokens"
)
ERROR_RATE_LIMIT = (
"GitHub API rate limit exceeded or token lacks permissions. "
"Ensure your token has 'repo' or 'public_repo' scope"
)
def _get_directory_size_mb(directory: pathlib.Path) -> float:
"""Calculate total size of directory in MB (runs in executor)."""
total = sum(f.stat().st_size for f in directory.rglob("*") if f.is_file())
return total / (1024 * 1024)
async def _get_pr_head_sha(client: GitHubAPI, pr_number: int) -> str:
    """Return the head commit SHA for the given PR.

    Raises HomeAssistantError with a user-friendly message for known
    GitHub API failures (bad token, rate limit/permissions, missing PR).
    """
    try:
        # Use GITHUB_REPO instead of a hardcoded repo slug so the
        # endpoint stays consistent with the error messages below.
        response = await client.generic(
            endpoint=f"/repos/{GITHUB_REPO}/pulls/{pr_number}",
        )
        return str(response.data["head"]["sha"])
    except GitHubAuthenticationException as err:
        # 401: token rejected
        raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
    except (GitHubRatelimitException, GitHubPermissionException) as err:
        # 403: rate limited or token lacks required scope
        raise HomeAssistantError(ERROR_RATE_LIMIT) from err
    except GitHubNotFoundException as err:
        # 404: the PR number does not exist in the repository
        raise HomeAssistantError(
            f"PR #{pr_number} does not exist in repository {GITHUB_REPO}"
        ) from err
    except GitHubException as err:
        # Catch-all for any other aiogithubapi failure; must stay last
        # because the specific exceptions above subclass it.
        raise HomeAssistantError(f"GitHub API error: {err}") from err
async def _find_pr_artifact(client: GitHubAPI, pr_number: int, head_sha: str) -> str:
    """Find the build artifact for the given PR and commit SHA.

    Returns the artifact download URL.

    Raises HomeAssistantError if no successful CI run produced the
    expected artifact, or on GitHub API failures.
    """
    try:
        # Get workflow runs for the commit. Use GITHUB_REPO instead of a
        # hardcoded repo slug, consistent with the error messages below.
        response = await client.generic(
            endpoint=f"/repos/{GITHUB_REPO}/actions/workflows/ci.yaml/runs",
            params={"head_sha": head_sha, "per_page": 10},
        )
        # Find the most recent successful run for this commit
        for run in response.data.get("workflow_runs", []):
            if run["status"] == "completed" and run["conclusion"] == "success":
                # Get artifacts for this run
                artifacts_response = await client.generic(
                    endpoint=(
                        f"/repos/{GITHUB_REPO}/actions/runs/{run['id']}/artifacts"
                    ),
                )
                # Find the frontend-build artifact
                for artifact in artifacts_response.data.get("artifacts", []):
                    if artifact["name"] == ARTIFACT_NAME:
                        _LOGGER.info(
                            "Found artifact '%s' from CI run #%s",
                            ARTIFACT_NAME,
                            run["id"],
                        )
                        return str(artifact["archive_download_url"])
        raise HomeAssistantError(
            f"No '{ARTIFACT_NAME}' artifact found for PR #{pr_number}. "
            "Possible reasons: CI has not run yet or is running, "
            "or the build failed, or the PR artifact expired. "
            f"Check https://github.com/{GITHUB_REPO}/pull/{pr_number}/checks"
        )
    except GitHubAuthenticationException as err:
        # 401: token rejected
        raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
    except (GitHubRatelimitException, GitHubPermissionException) as err:
        # 403: rate limited or token lacks required scope
        raise HomeAssistantError(ERROR_RATE_LIMIT) from err
    except GitHubException as err:
        # Catch-all for any other aiogithubapi failure; must stay last
        raise HomeAssistantError(f"GitHub API error: {err}") from err
async def _download_artifact_data(
    hass: HomeAssistant, artifact_url: str, github_token: str
) -> bytes:
    """Download artifact data from GitHub.

    Raises HomeAssistantError on authentication, HTTP, timeout, or
    network failures.
    """
    session = async_get_clientsession(hass)
    headers = {
        "Authorization": f"token {github_token}",
        "Accept": "application/vnd.github+json",
    }
    try:
        # async with ensures the connection is released even when
        # raise_for_status() or read() fails mid-response.
        async with session.get(
            artifact_url, headers=headers, timeout=ClientTimeout(total=60)
        ) as response:
            response.raise_for_status()
            return await response.read()
    except ClientResponseError as err:
        if err.status == 401:
            raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
        if err.status == 403:
            raise HomeAssistantError(ERROR_RATE_LIMIT) from err
        raise HomeAssistantError(
            f"Failed to download artifact: HTTP {err.status}"
        ) from err
    except TimeoutError as err:
        # aiohttp raises TimeoutError (== asyncio.TimeoutError on 3.11+)
        # when ClientTimeout expires.
        raise HomeAssistantError(
            "Timeout downloading artifact (>60s). Check your network connection"
        ) from err
    except ClientError as err:
        raise HomeAssistantError(f"Network error downloading artifact: {err}") from err
def _extract_artifact(
artifact_data: bytes,
pr_dir: pathlib.Path,
frontend_dir: pathlib.Path,
head_sha: str,
) -> None:
"""Extract artifact and save SHA (runs in executor)."""
if pr_dir.exists():
shutil.rmtree(pr_dir)
frontend_dir.mkdir(parents=True, exist_ok=True)
with zipfile.ZipFile(io.BytesIO(artifact_data)) as zip_file:
zip_file.extractall(str(frontend_dir))
# Save the commit SHA for cache validation
sha_file = pr_dir / ".sha"
sha_file.write_text(head_sha)
def _read_cached_sha(
    sha_file: pathlib.Path, frontend_dir: pathlib.Path
) -> str | None:
    """Return the cached commit SHA, or None if no complete cache exists.

    Runs in the executor: Path.exists() and read_text() are blocking I/O.
    """
    if frontend_dir.exists() and sha_file.exists():
        return sha_file.read_text()
    return None


async def download_pr_artifact(
    hass: HomeAssistant,
    pr_number: int,
    github_token: str | None,
    cache_dir: pathlib.Path,
) -> pathlib.Path | None:
    """Download and extract frontend PR artifact from GitHub.

    Returns the path to the extracted hass_frontend directory, or None on failure.
    """
    # GitHub token is required to download artifacts
    if not github_token:
        _LOGGER.error(
            "GitHub token is required to download PR artifacts. "
            "Add 'github_token' to your frontend configuration"
        )
        return None
    # Create GitHub API client
    client = GitHubAPI(
        token=github_token,
        session=async_get_clientsession(hass),
    )
    # Get the current head SHA for this PR
    try:
        head_sha = await _get_pr_head_sha(client, pr_number)
    except HomeAssistantError as err:
        _LOGGER.error("%s", err)
        return None
    # Check if we have this exact version cached
    pr_dir = cache_dir / str(pr_number)
    frontend_dir = pr_dir / "hass_frontend"
    sha_file = pr_dir / ".sha"
    # Run the whole cache probe in the executor: exists() and read_text()
    # are blocking filesystem calls and must not run in the event loop.
    cached_sha = await hass.async_add_executor_job(
        _read_cached_sha, sha_file, frontend_dir
    )
    if cached_sha is not None:
        if cached_sha.strip() == head_sha:
            _LOGGER.info(
                "Using cached PR #%s (commit %s) from %s",
                pr_number,
                head_sha[:8],
                pr_dir,
            )
            return frontend_dir
        _LOGGER.info(
            "PR #%s has new commits (cached: %s, current: %s), re-downloading",
            pr_number,
            cached_sha[:8],
            head_sha[:8],
        )
    try:
        # Find the artifact
        artifact_url = await _find_pr_artifact(client, pr_number, head_sha)
        # Download artifact
        _LOGGER.info("Downloading frontend PR #%s artifact", pr_number)
        artifact_data = await _download_artifact_data(hass, artifact_url, github_token)
        # Extract artifact (blocking zip/file I/O -> executor)
        await hass.async_add_executor_job(
            _extract_artifact, artifact_data, pr_dir, frontend_dir, head_sha
        )
        _LOGGER.info(
            "Successfully downloaded and extracted PR #%s (commit %s) to %s",
            pr_number,
            head_sha[:8],
            pr_dir,
        )
        size_mb = await hass.async_add_executor_job(_get_directory_size_mb, pr_dir)
        _LOGGER.info("PR #%s cache size: %.1f MB", pr_number, size_mb)
        # Warn if total cache size exceeds threshold
        total_cache_size = await hass.async_add_executor_job(
            _get_directory_size_mb, cache_dir
        )
        if total_cache_size > CACHE_WARNING_SIZE_MB:
            _LOGGER.warning(
                "Frontend PR cache directory has grown to %.1f MB (threshold: %d MB). "
                "Consider manually cleaning up old PR caches in %s",
                total_cache_size,
                CACHE_WARNING_SIZE_MB,
                cache_dir,
            )
    except HomeAssistantError as err:
        _LOGGER.error("%s", err)
        return None
    except Exception:
        # Last-resort guard: never let an unexpected error break setup
        _LOGGER.exception("Unexpected error downloading PR #%s", pr_number)
        return None
    else:
        return frontend_dir

View File

@@ -3,6 +3,7 @@
aiodhcpwatcher==1.2.1
aiodiscover==2.7.1
aiodns==4.0.0
aiogithubapi==24.6.0
aiohasupervisor==0.3.3
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.3.0

1
requirements_all.txt generated
View File

@@ -264,6 +264,7 @@ aioflo==2021.11.0
# homeassistant.components.yi
aioftp==0.21.3
# homeassistant.components.frontend
# homeassistant.components.github
aiogithubapi==24.6.0

View File

@@ -252,6 +252,7 @@ aiofiles==24.1.0
# homeassistant.components.flo
aioflo==2021.11.0
# homeassistant.components.frontend
# homeassistant.components.github
aiogithubapi==24.6.0

View File

@@ -150,6 +150,7 @@ FORBIDDEN_PACKAGE_EXCEPTIONS: dict[str, dict[str, set[str]]] = {
},
"flux_led": {"flux-led": {"async-timeout"}},
"foobot": {"foobot-async": {"async-timeout"}},
"frontend": {"aiogithubapi": {"async-timeout"}},
"github": {"aiogithubapi": {"async-timeout"}},
"guardian": {
# https://github.com/jsbronder/asyncio-dgram/issues/20

View File

@@ -0,0 +1,68 @@
"""Fixtures for frontend tests."""
from __future__ import annotations
from collections.abc import Generator
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
@pytest.fixture
def mock_github_api() -> Generator[AsyncMock]:
    """Mock aiogithubapi GitHubAPI."""
    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        client = AsyncMock()
        mock_gh_class.return_value = client

        # Canned response for the pull-request endpoint
        pr_resp = AsyncMock()
        pr_resp.data = {"head": {"sha": "abc123def456"}}

        # Canned response for the workflow-runs endpoint
        runs_resp = AsyncMock()
        runs_resp.data = {
            "workflow_runs": [
                {"id": 12345, "status": "completed", "conclusion": "success"}
            ]
        }

        # Canned response for the artifacts endpoint
        artifacts_resp = AsyncMock()
        artifacts_resp.data = {
            "artifacts": [
                {
                    "name": "frontend-build",
                    "archive_download_url": "https://api.github.com/artifact/download",
                }
            ]
        }

        async def _route(endpoint, **kwargs):
            """Dispatch canned responses based on the requested endpoint."""
            # Order matters: the workflow-runs URL also contains "runs",
            # and the artifacts URL also contains "runs" but not "workflows".
            if "pulls" in endpoint:
                return pr_resp
            if "workflows" in endpoint and "runs" in endpoint:
                return runs_resp
            if "artifacts" in endpoint:
                return artifacts_resp
            raise ValueError(f"Unexpected endpoint: {endpoint}")

        client.generic.side_effect = _route
        yield client
@pytest.fixture
def mock_zipfile() -> Generator[MagicMock]:
    """Mock zipfile extraction."""
    with patch("zipfile.ZipFile") as mock_zip:
        # Yield the object the production code gets inside `with ZipFile(...)`
        extracted = MagicMock()
        mock_zip.return_value.__enter__.return_value = extracted
        yield extracted

View File

@@ -0,0 +1,274 @@
"""Tests for frontend PR download functionality."""
from __future__ import annotations
from pathlib import Path
from unittest.mock import AsyncMock, patch
from aiogithubapi import (
GitHubAuthenticationException,
GitHubException,
GitHubNotFoundException,
GitHubPermissionException,
GitHubRatelimitException,
)
from aiohttp import ClientError
import pytest
from homeassistant.components.frontend import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_pr_download_success(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api,
    aioclient_mock: AiohttpClientMocker,
    mock_zipfile,
) -> None:
    """Test successful PR artifact download."""
    hass.config.config_dir = str(tmp_path)

    # Serve fake artifact bytes for the download URL
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )

    conf = {DOMAIN: {"development_pr": 12345, "github_token": "test_token"}}
    assert await async_setup_component(hass, DOMAIN, conf)
    await hass.async_block_till_done()

    # GitHub API was queried for the PR and its workflow runs
    assert mock_github_api.generic.call_count >= 2  # PR + workflow runs
    # Exactly one HTTP download of the artifact
    assert len(aioclient_mock.mock_calls) == 1
    # The archive was unpacked
    mock_zipfile.extractall.assert_called_once()
async def test_pr_download_uses_cache(
    hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture
) -> None:
    """Test that cached PR is used when commit hasn't changed."""
    hass.config.config_dir = str(tmp_path)

    # Pre-populate the cache for PR 12345 at commit abc123def456
    cache_root = tmp_path / "frontend_development_artifacts" / "12345"
    cached_frontend = cache_root / "hass_frontend"
    cached_frontend.mkdir(parents=True)
    (cached_frontend / "index.html").write_text("test")
    (cache_root / ".sha").write_text("abc123def456")

    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        client = AsyncMock()
        mock_gh_class.return_value = client
        # The live PR reports the same SHA as the cache
        pr_resp = AsyncMock()
        pr_resp.data = {"head": {"sha": "abc123def456"}}
        client.generic.return_value = pr_resp

        conf = {DOMAIN: {"development_pr": 12345, "github_token": "test_token"}}
        assert await async_setup_component(hass, DOMAIN, conf)
        await hass.async_block_till_done()

        assert "Using cached PR #12345" in caplog.text
        # Only the PR lookup hit the API; no artifact search or download
        api_calls = list(client.generic.call_args_list)
        assert len(api_calls) == 1  # Only PR check
        assert "pulls" in str(api_calls[0])
async def test_pr_download_cache_invalidated(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api,
    aioclient_mock: AiohttpClientMocker,
    mock_zipfile,
) -> None:
    """Test that cache is invalidated when commit changes."""
    hass.config.config_dir = str(tmp_path)

    # Pre-populate the cache with a stale commit SHA
    cache_root = tmp_path / "frontend_development_artifacts" / "12345"
    stale_frontend = cache_root / "hass_frontend"
    stale_frontend.mkdir(parents=True)
    (stale_frontend / "index.html").write_text("test")
    (cache_root / ".sha").write_text("old_commit_sha")

    # Serve fake artifact bytes for the download URL
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )

    conf = {DOMAIN: {"development_pr": 12345, "github_token": "test_token"}}
    assert await async_setup_component(hass, DOMAIN, conf)
    await hass.async_block_till_done()

    # The stale cache triggered a fresh download
    assert len(aioclient_mock.mock_calls) == 1
async def test_pr_download_missing_token(
    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
    """Test that PR download fails gracefully without token."""
    conf = {DOMAIN: {"development_pr": 12345}}
    assert await async_setup_component(hass, DOMAIN, conf)
    await hass.async_block_till_done()

    # Setup falls back to the integrated frontend and explains why
    assert "GitHub token is required to download PR artifacts" in caplog.text
@pytest.mark.parametrize(
    ("exc", "error_message"),
    [
        (GitHubAuthenticationException("Unauthorized"), "invalid or expired"),
        (GitHubRatelimitException("Rate limit exceeded"), "rate limit"),
        (GitHubPermissionException("Forbidden"), "rate limit"),
        (GitHubNotFoundException("Not found"), "does not exist"),
        (GitHubException("API error"), "api error"),
    ],
)
async def test_pr_download_github_errors(
    hass: HomeAssistant,
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    exc: Exception,
    error_message: str,
) -> None:
    """Test handling of various GitHub API errors."""
    hass.config.config_dir = str(tmp_path)

    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        client = AsyncMock()
        mock_gh_class.return_value = client
        # Every API call fails with the parametrized exception
        client.generic.side_effect = exc

        conf = {DOMAIN: {"development_pr": 12345, "github_token": "test_token"}}
        # Setup must not raise; it logs the error and falls back
        assert await async_setup_component(hass, DOMAIN, conf)
        await hass.async_block_till_done()

    assert error_message in caplog.text.lower()
    assert "Failed to download PR #12345" in caplog.text
async def test_pr_download_artifact_not_found(
    hass: HomeAssistant,
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling when artifact is not found."""
    hass.config.config_dir = str(tmp_path)

    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        client = AsyncMock()
        mock_gh_class.return_value = client

        pr_resp = AsyncMock()
        pr_resp.data = {"head": {"sha": "abc123def456"}}
        # No completed workflow runs -> no artifact can be located
        runs_resp = AsyncMock()
        runs_resp.data = {"workflow_runs": []}

        async def _route(endpoint, **kwargs):
            if "pulls" in endpoint:
                return pr_resp
            if "workflows" in endpoint:
                return runs_resp
            raise ValueError(f"Unexpected endpoint: {endpoint}")

        client.generic.side_effect = _route

        conf = {DOMAIN: {"development_pr": 12345, "github_token": "test_token"}}
        assert await async_setup_component(hass, DOMAIN, conf)
        await hass.async_block_till_done()

    assert "No 'frontend-build' artifact found" in caplog.text
async def test_pr_download_http_error(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling of HTTP download errors."""
    hass.config.config_dir = str(tmp_path)

    # The artifact download fails at the network layer
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        exc=ClientError("Download failed"),
    )

    conf = {DOMAIN: {"development_pr": 12345, "github_token": "test_token"}}
    assert await async_setup_component(hass, DOMAIN, conf)
    await hass.async_block_till_done()

    # Setup falls back and logs the failure
    assert "Failed to download PR #12345" in caplog.text