1
0
mirror of https://github.com/home-assistant/core.git synced 2026-02-15 07:36:16 +00:00

Add option to use frontend PR artifact to frontend integration (#161291)

Co-authored-by: Claude Sonnet 4.5 <noreply@anthropic.com>
Co-authored-by: Erik Montnemery <erik@montnemery.com>
This commit is contained in:
Wendelin
2026-02-03 10:23:25 +01:00
committed by GitHub
parent d219056e9d
commit e6a60dfe50
11 changed files with 1066 additions and 0 deletions

View File

@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
"tmp_backups/*.tar",
"OZW_Log.txt",
"tts/*",
".cache/*",
]
EXCLUDE_DATABASE_FROM_BACKUP = [

View File

@@ -7,6 +7,7 @@ from functools import lru_cache, partial
import logging
import os
import pathlib
import shutil
from typing import Any, TypedDict
from aiohttp import hdrs, web, web_urldispatcher
@@ -36,6 +37,7 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_integration, bind_hass
from homeassistant.util.hass_dict import HassKey
from .pr_download import download_pr_artifact
from .storage import (
async_setup_frontend_storage,
async_system_store as async_system_store,
@@ -55,6 +57,10 @@ CONF_EXTRA_MODULE_URL = "extra_module_url"
CONF_EXTRA_JS_URL_ES5 = "extra_js_url_es5"
CONF_FRONTEND_REPO = "development_repo"
CONF_JS_VERSION = "javascript_version"
CONF_DEVELOPMENT_PR = "development_pr"
CONF_GITHUB_TOKEN = "github_token"
DEV_ARTIFACTS_DIR = "development_artifacts"
DEFAULT_THEME_COLOR = "#2980b9"
@@ -133,6 +139,8 @@ CONFIG_SCHEMA = vol.Schema(
DOMAIN: vol.Schema(
{
vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
vol.Inclusive(CONF_DEVELOPMENT_PR, "development_pr"): cv.positive_int,
vol.Inclusive(CONF_GITHUB_TOKEN, "development_pr"): cv.string,
vol.Optional(CONF_THEMES): vol.All(dict, _validate_themes),
vol.Optional(CONF_EXTRA_MODULE_URL): vol.All(
cv.ensure_list, [cv.string]
@@ -425,6 +433,49 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)
repo_path = conf.get(CONF_FRONTEND_REPO)
dev_pr_number = conf.get(CONF_DEVELOPMENT_PR)
pr_cache_dir = pathlib.Path(hass.config.cache_path(DOMAIN, DEV_ARTIFACTS_DIR))
if not dev_pr_number and pr_cache_dir.exists():
try:
await hass.async_add_executor_job(shutil.rmtree, pr_cache_dir)
_LOGGER.debug("Cleaned up frontend development artifacts")
except OSError as err:
_LOGGER.warning(
"Could not clean up frontend development artifacts: %s", err
)
# Priority: development_repo > development_pr > integrated
if repo_path and dev_pr_number:
_LOGGER.warning(
"Both development_repo and development_pr are specified for frontend. "
"Using development_repo, remove development_repo to use "
"automatic PR download"
)
dev_pr_number = None
if dev_pr_number:
github_token: str = conf[CONF_GITHUB_TOKEN]
try:
dev_pr_dir = await download_pr_artifact(
hass, dev_pr_number, github_token, pr_cache_dir
)
repo_path = str(dev_pr_dir)
_LOGGER.info("Using frontend from PR #%s", dev_pr_number)
except HomeAssistantError as err:
_LOGGER.error(
"Failed to download PR #%s: %s, falling back to the integrated frontend",
dev_pr_number,
err,
)
except Exception: # pylint: disable=broad-exception-caught
_LOGGER.exception(
"Unexpected error downloading PR #%s, "
"falling back to the integrated frontend",
dev_pr_number,
)
is_dev = repo_path is not None
root_path = _frontend_root(repo_path)

View File

@@ -0,0 +1,242 @@
"""GitHub PR artifact download functionality for frontend development."""
from __future__ import annotations
import io
import logging
import pathlib
import shutil
import zipfile
from aiogithubapi import (
GitHubAPI,
GitHubAuthenticationException,
GitHubException,
GitHubNotFoundException,
GitHubPermissionException,
GitHubRatelimitException,
)
from aiohttp import ClientError, ClientResponseError, ClientTimeout
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
_LOGGER = logging.getLogger(__name__)
GITHUB_REPO = "home-assistant/frontend"
ARTIFACT_NAME = "frontend-build"
# Zip bomb protection limits (10x typical frontend build size)
# Typical frontend build: ~4500 files, ~135MB uncompressed
MAX_ZIP_FILES = 50000
MAX_ZIP_SIZE = 1500 * 1024 * 1024 # 1.5GB
ERROR_INVALID_TOKEN = (
"GitHub token is invalid or expired. "
"Please check your github_token in the frontend configuration. "
"Generate a new token at https://github.com/settings/tokens"
)
ERROR_RATE_LIMIT = (
"GitHub API rate limit exceeded or token lacks permissions. "
"Ensure your token has 'repo' or 'public_repo' scope"
)
async def _get_pr_head_sha(client: GitHubAPI, pr_number: int) -> str:
"""Get the head SHA for the PR."""
try:
response = await client.generic(
endpoint=f"/repos/home-assistant/frontend/pulls/{pr_number}",
)
return str(response.data["head"]["sha"])
except GitHubAuthenticationException as err:
raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
except (GitHubRatelimitException, GitHubPermissionException) as err:
raise HomeAssistantError(ERROR_RATE_LIMIT) from err
except GitHubNotFoundException as err:
raise HomeAssistantError(
f"PR #{pr_number} does not exist in repository {GITHUB_REPO}"
) from err
except GitHubException as err:
raise HomeAssistantError(f"GitHub API error: {err}") from err
async def _find_pr_artifact(client: GitHubAPI, pr_number: int, head_sha: str) -> str:
    """Find the build artifact for the given PR and commit SHA.

    Returns the artifact download URL.

    Raises HomeAssistantError when no successful CI run produced the
    expected artifact, or on GitHub API failures.
    """
    try:
        # List recent CI workflow runs for the PR's head commit;
        # per_page=10 keeps the query small since we only need a
        # completed, successful run.
        response = await client.generic(
            endpoint="/repos/home-assistant/frontend/actions/workflows/ci.yaml/runs",
            params={"head_sha": head_sha, "per_page": 10},
        )
        for run in response.data.get("workflow_runs", []):
            if run["status"] == "completed" and run["conclusion"] == "success":
                artifacts_response = await client.generic(
                    endpoint=f"/repos/home-assistant/frontend/actions/runs/{run['id']}/artifacts",
                )
                for artifact in artifacts_response.data.get("artifacts", []):
                    if artifact["name"] == ARTIFACT_NAME:
                        _LOGGER.info(
                            "Found artifact '%s' from CI run #%s",
                            ARTIFACT_NAME,
                            run["id"],
                        )
                        return str(artifact["archive_download_url"])
        # No successful run carried the expected artifact.
        # NOTE: raised inside the try block on purpose — HomeAssistantError
        # is not a GitHubException subclass, so it propagates past the
        # handlers below unchanged.
        raise HomeAssistantError(
            f"No '{ARTIFACT_NAME}' artifact found for PR #{pr_number}. "
            "Possible reasons: CI has not run yet or is running, "
            "or the build failed, or the PR artifact expired. "
            f"Check https://github.com/{GITHUB_REPO}/pull/{pr_number}/checks"
        )
    except GitHubAuthenticationException as err:
        raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
    except (GitHubRatelimitException, GitHubPermissionException) as err:
        raise HomeAssistantError(ERROR_RATE_LIMIT) from err
    except GitHubException as err:
        raise HomeAssistantError(f"GitHub API error: {err}") from err
async def _download_artifact_data(
    hass: HomeAssistant, artifact_url: str, github_token: str
) -> bytes:
    """Download artifact data from GitHub.

    Returns the raw zip archive as bytes (buffered fully in memory).

    Raises HomeAssistantError with an actionable message on auth,
    rate-limit, other HTTP, timeout, or network failures.
    """
    session = async_get_clientsession(hass)
    headers = {
        "Authorization": f"token {github_token}",
        "Accept": "application/vnd.github+json",
    }
    try:
        response = await session.get(
            artifact_url, headers=headers, timeout=ClientTimeout(total=60)
        )
        response.raise_for_status()
        return await response.read()
    except ClientResponseError as err:
        # Map well-known GitHub status codes onto user-oriented errors.
        if err.status == 401:
            raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
        if err.status == 403:
            raise HomeAssistantError(ERROR_RATE_LIMIT) from err
        raise HomeAssistantError(
            f"Failed to download artifact: HTTP {err.status}"
        ) from err
    except TimeoutError as err:
        # ClientTimeout(total=60) above surfaces as TimeoutError.
        raise HomeAssistantError(
            "Timeout downloading artifact (>60s). Check your network connection"
        ) from err
    except ClientError as err:
        raise HomeAssistantError(f"Network error downloading artifact: {err}") from err
def _extract_artifact(
artifact_data: bytes,
cache_dir: pathlib.Path,
head_sha: str,
) -> None:
"""Extract artifact and save SHA (runs in executor)."""
frontend_dir = cache_dir / "hass_frontend"
if cache_dir.exists():
shutil.rmtree(cache_dir)
frontend_dir.mkdir(parents=True, exist_ok=True)
with zipfile.ZipFile(io.BytesIO(artifact_data)) as zip_file:
# Validate zip contents to protect against zip bombs
# See: https://github.com/python/cpython/issues/80643
total_size = 0
for file_count, info in enumerate(zip_file.infolist(), start=1):
total_size += info.file_size
if file_count > MAX_ZIP_FILES:
raise ValueError(
f"Zip contains too many files (>{MAX_ZIP_FILES}), possible zip bomb"
)
if total_size > MAX_ZIP_SIZE:
raise ValueError(
f"Zip uncompressed size too large (>{MAX_ZIP_SIZE} bytes), "
"possible zip bomb"
)
zip_file.extractall(str(frontend_dir))
# Save the commit SHA for cache validation
sha_file = cache_dir / ".sha"
sha_file.write_text(head_sha)
async def download_pr_artifact(
    hass: HomeAssistant,
    pr_number: int,
    github_token: str,
    tmp_dir: pathlib.Path,
) -> pathlib.Path:
    """Download and extract frontend PR artifact from GitHub.

    Returns the path to the tmp directory containing hass_frontend/.

    Raises HomeAssistantError on failure.
    """
    try:
        session = async_get_clientsession(hass)
    except Exception as err:  # broad on purpose: any session failure is fatal here
        raise HomeAssistantError(f"Failed to get HTTP client session: {err}") from err
    client = GitHubAPI(token=github_token, session=session)
    head_sha = await _get_pr_head_sha(client, pr_number)
    frontend_dir = tmp_dir / "hass_frontend"
    sha_file = tmp_dir / ".sha"
    # Cache check: reuse the previous extraction when the PR head SHA
    # matches the SHA recorded alongside it.
    if frontend_dir.exists() and sha_file.exists():
        try:
            cached_sha = await hass.async_add_executor_job(sha_file.read_text)
            if cached_sha.strip() == head_sha:
                _LOGGER.info(
                    "Using cached PR #%s (commit %s) from %s",
                    pr_number,
                    head_sha[:8],
                    tmp_dir,
                )
                return tmp_dir
            _LOGGER.info(
                "PR #%s has new commits (cached: %s, current: %s), re-downloading",
                pr_number,
                cached_sha[:8],
                head_sha[:8],
            )
        except OSError as err:
            # Unreadable SHA marker: treat as a cache miss and re-download.
            _LOGGER.debug("Failed to read cache SHA file: %s", err)
    artifact_url = await _find_pr_artifact(client, pr_number, head_sha)
    _LOGGER.info("Downloading frontend PR #%s artifact", pr_number)
    artifact_data = await _download_artifact_data(hass, artifact_url, github_token)
    try:
        # Extraction is blocking (zipfile + disk I/O), so run it in the
        # executor; errors are translated to HomeAssistantError below.
        await hass.async_add_executor_job(
            _extract_artifact, artifact_data, tmp_dir, head_sha
        )
    except zipfile.BadZipFile as err:
        raise HomeAssistantError(
            f"Downloaded artifact for PR #{pr_number} is corrupted or invalid"
        ) from err
    except ValueError as err:
        # Raised by the zip-bomb validation in _extract_artifact.
        raise HomeAssistantError(
            f"Downloaded artifact for PR #{pr_number} failed validation: {err}"
        ) from err
    except OSError as err:
        raise HomeAssistantError(
            f"Failed to extract artifact for PR #{pr_number}: {err}"
        ) from err
    _LOGGER.info(
        "Successfully downloaded and extracted PR #%s (commit %s) to %s",
        pr_number,
        head_sha[:8],
        tmp_dir,
    )
    return tmp_dir

View File

@@ -630,6 +630,16 @@ class Config:
"""
return os.path.join(self.config_dir, *path)
def cache_path(self, *path: str) -> str:
    """Generate path to the file within the cache directory.

    The cache directory is used for temporary data that can be
    regenerated and is not included in backups.

    Async friendly.
    """
    # Prepend the hidden cache folder and delegate to the generic
    # config-dir path builder.
    segments = (".cache", *path)
    return self.path(*segments)
def is_allowed_external_url(self, url: str) -> bool:
"""Check if an external URL is allowed."""
parsed_url = f"{yarl.URL(url)!s}/"

View File

@@ -3,6 +3,7 @@
aiodhcpwatcher==1.2.1
aiodiscover==2.7.1
aiodns==4.0.0
aiogithubapi==24.6.0
aiohasupervisor==0.3.3
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.3.0

View File

@@ -24,6 +24,10 @@ classifiers = [
requires-python = ">=3.14.2"
dependencies = [
"aiodns==4.0.0",
# aiogithubapi is needed by frontend; frontend is unconditionally imported at
# module level in `bootstrap.py` and its requirements thus need to be in
# requirements.txt to ensure they are always installed
"aiogithubapi==24.6.0",
# Integrations may depend on hassio integration without listing it to
# change behavior based on presence of supervisor. Deprecated with #127228
# Lib can be removed with 2025.11

1
requirements.txt generated
View File

@@ -4,6 +4,7 @@
# Home Assistant Core
aiodns==4.0.0
aiogithubapi==24.6.0
aiohasupervisor==0.3.3
aiohttp-asyncmdnsresolver==0.1.1
aiohttp-fast-zlib==0.3.0

View File

@@ -0,0 +1,72 @@
"""Fixtures for frontend tests."""
from __future__ import annotations
from collections.abc import Generator
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
@pytest.fixture
def mock_github_api() -> Generator[AsyncMock]:
    """Mock aiogithubapi GitHubAPI.

    Patches the GitHubAPI class used by pr_download and yields a client
    whose ``generic`` call answers the three endpoints the download flow
    hits: PR details, workflow runs, and run artifacts.
    """
    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        mock_client = AsyncMock()
        mock_gh_class.return_value = mock_client
        # Mock PR response
        pr_response = AsyncMock()
        pr_response.data = {"head": {"sha": "abc123def456"}}
        # Mock workflow runs response (one completed, successful CI run)
        workflow_response = AsyncMock()
        workflow_response.data = {
            "workflow_runs": [
                {
                    "id": 12345,
                    "status": "completed",
                    "conclusion": "success",
                }
            ]
        }
        # Mock artifacts response containing the expected build artifact
        artifacts_response = AsyncMock()
        artifacts_response.data = {
            "artifacts": [
                {
                    "name": "frontend-build",
                    "archive_download_url": "https://api.github.com/artifact/download",
                }
            ]
        }

        # Setup generic method to return appropriate responses; the
        # endpoint substrings mirror the real GitHub REST paths used by
        # pr_download.
        async def generic_side_effect(endpoint, **kwargs):
            if "pulls" in endpoint:
                return pr_response
            if "workflows" in endpoint and "runs" in endpoint:
                return workflow_response
            if "artifacts" in endpoint:
                return artifacts_response
            raise ValueError(f"Unexpected endpoint: {endpoint}")

        mock_client.generic.side_effect = generic_side_effect
        yield mock_client
@pytest.fixture
def mock_zipfile() -> Generator[MagicMock]:
    """Mock zipfile extraction.

    Yields the mocked ZipFile instance so tests can assert on
    ``extractall`` without writing real archives to disk.
    """
    with patch("zipfile.ZipFile") as mock_zip:
        mock_zip_instance = MagicMock()
        # Mock infolist for zip bomb validation (single small member so
        # the size/count checks pass).
        mock_info = MagicMock()
        mock_info.file_size = 1000  # Small file size
        mock_zip_instance.infolist.return_value = [mock_info]
        mock_zip.return_value.__enter__.return_value = mock_zip_instance
        yield mock_zip_instance

View File

@@ -14,9 +14,12 @@ import pytest
import voluptuous as vol
from homeassistant.components.frontend import (
CONF_DEVELOPMENT_PR,
CONF_EXTRA_JS_URL_ES5,
CONF_EXTRA_MODULE_URL,
CONF_GITHUB_TOKEN,
CONF_THEMES,
CONFIG_SCHEMA,
DEFAULT_THEME_COLOR,
DOMAIN,
EVENT_PANELS_UPDATED,
@@ -33,6 +36,7 @@ from homeassistant.loader import async_get_integration
from homeassistant.setup import async_setup_component
from tests.common import MockUser, async_capture_events, async_fire_time_changed
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import (
ClientSessionGenerator,
MockHAClientWebSocket,
@@ -1104,3 +1108,104 @@ async def test_www_local_dir(
client = await hass_client()
resp = await client.get("/local/x.txt")
assert resp.status == HTTPStatus.OK
async def test_development_pr_and_github_token_inclusive() -> None:
    """Test that development_pr and github_token must both be set or neither."""
    # Both present - valid
    valid_config = {
        DOMAIN: {
            CONF_DEVELOPMENT_PR: 12345,
            CONF_GITHUB_TOKEN: "test_token",
        }
    }
    assert CONFIG_SCHEMA(valid_config)
    # Neither present - also valid
    valid_config_empty: dict[str, dict[str, Any]] = {DOMAIN: {}}
    assert CONFIG_SCHEMA(valid_config_empty)
    # Only the PR number - rejected by vol.Inclusive
    invalid_config_pr_only = {
        DOMAIN: {
            CONF_DEVELOPMENT_PR: 12345,
        }
    }
    with pytest.raises(vol.Invalid, match="some but not all"):
        CONFIG_SCHEMA(invalid_config_pr_only)
    # Only the token - rejected by vol.Inclusive
    invalid_config_token_only: dict[str, dict[str, Any]] = {
        DOMAIN: {CONF_GITHUB_TOKEN: "test_token"}
    }
    with pytest.raises(vol.Invalid, match="some but not all"):
        CONFIG_SCHEMA(invalid_config_token_only)
async def test_setup_with_development_pr_and_token(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api,
    aioclient_mock: AiohttpClientMocker,
) -> None:
    """Test that setup succeeds when both development_pr and github_token are provided."""
    hass.config.config_dir = str(tmp_path)
    # Serve the artifact archive download from the mocked HTTP layer.
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )
    config = {
        DOMAIN: {
            CONF_DEVELOPMENT_PR: 12345,
            CONF_GITHUB_TOKEN: "test_token",
        }
    }
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()
    # Verify GitHub API was called
    assert mock_github_api.generic.call_count >= 2  # PR + workflow runs
async def test_setup_cleans_up_pr_cache_when_not_configured(
    hass: HomeAssistant,
    tmp_path: Path,
) -> None:
    """Test that PR cache is cleaned up when no PR is configured."""
    hass.config.config_dir = str(tmp_path)
    # Simulate leftovers from a previous run with development_pr enabled.
    pr_cache_dir = tmp_path / ".cache" / "frontend" / "development_artifacts"
    pr_cache_dir.mkdir(parents=True)
    (pr_cache_dir / "test_file.txt").write_text("test")
    config: dict[str, dict[str, Any]] = {DOMAIN: {}}
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()
    # Setup without development_pr must remove the stale artifact cache.
    assert not pr_cache_dir.exists()
async def test_setup_with_development_pr_unexpected_error(
    hass: HomeAssistant,
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that setup handles unexpected errors during PR download gracefully."""
    hass.config.config_dir = str(tmp_path)
    # A non-HomeAssistantError from the download helper must not break setup;
    # the integration falls back to the integrated frontend.
    with patch(
        "homeassistant.components.frontend.download_pr_artifact",
        side_effect=RuntimeError("Unexpected error"),
    ):
        config = {
            DOMAIN: {
                CONF_DEVELOPMENT_PR: 12345,
                CONF_GITHUB_TOKEN: "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
    assert "Unexpected error downloading PR #12345" in caplog.text

View File

@@ -0,0 +1,560 @@
"""Tests for frontend PR download functionality."""
from __future__ import annotations
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, patch
from aiogithubapi import (
GitHubAuthenticationException,
GitHubException,
GitHubNotFoundException,
GitHubPermissionException,
GitHubRatelimitException,
)
from aiohttp import ClientError
import pytest
from homeassistant.components.frontend import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_pr_download_success(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api,
    aioclient_mock: AiohttpClientMocker,
    mock_zipfile,
) -> None:
    """Test successful PR artifact download."""
    hass.config.config_dir = str(tmp_path)
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )
    config = {
        DOMAIN: {
            "development_pr": 12345,
            "github_token": "test_token",
        }
    }
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()
    # PR lookup plus workflow/artifact listing hit the GitHub mock.
    assert mock_github_api.generic.call_count >= 2  # PR + workflow runs
    # Exactly one HTTP download of the artifact archive, then extraction.
    assert len(aioclient_mock.mock_calls) == 1
    mock_zipfile.extractall.assert_called_once()
async def test_pr_download_uses_cache(
    hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture
) -> None:
    """Test that cached PR is used when commit hasn't changed."""
    hass.config.config_dir = str(tmp_path)
    # Pre-populate the cache with an extracted build and matching SHA marker.
    pr_cache_dir = tmp_path / ".cache" / "frontend" / "development_artifacts"
    frontend_dir = pr_cache_dir / "hass_frontend"
    frontend_dir.mkdir(parents=True)
    (frontend_dir / "index.html").write_text("test")
    (pr_cache_dir / ".sha").write_text("abc123def456")
    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        mock_client = AsyncMock()
        mock_gh_class.return_value = mock_client
        # PR head SHA matches the cached one, so no re-download should occur.
        pr_response = AsyncMock()
        pr_response.data = {"head": {"sha": "abc123def456"}}
        mock_client.generic.return_value = pr_response
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
        assert "Using cached PR #12345" in caplog.text
        # Only the PR lookup hit the API - no workflow/artifact calls.
        calls = list(mock_client.generic.call_args_list)
        assert len(calls) == 1  # Only PR check
        assert "pulls" in str(calls[0])
async def test_pr_download_cache_invalidated(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api,
    aioclient_mock: AiohttpClientMocker,
    mock_zipfile,
) -> None:
    """Test that cache is invalidated when commit changes."""
    hass.config.config_dir = str(tmp_path)
    # Cache holds an old commit SHA that differs from the fixture's
    # "abc123def456" head SHA.
    pr_cache_dir = tmp_path / ".cache" / "frontend" / "development_artifacts"
    frontend_dir = pr_cache_dir / "hass_frontend"
    frontend_dir.mkdir(parents=True)
    (frontend_dir / "index.html").write_text("test")
    (pr_cache_dir / ".sha").write_text("old_commit_sha")
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )
    config = {
        DOMAIN: {
            "development_pr": 12345,
            "github_token": "test_token",
        }
    }
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()
    # Should download - commit changed
    assert len(aioclient_mock.mock_calls) == 1
async def test_pr_download_cache_sha_read_error(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    mock_zipfile: MagicMock,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that cache SHA read errors are handled gracefully."""
    hass.config.config_dir = str(tmp_path)
    pr_cache_dir = tmp_path / ".cache" / "frontend" / "development_artifacts"
    frontend_dir = pr_cache_dir / "hass_frontend"
    frontend_dir.mkdir(parents=True)
    (frontend_dir / "index.html").write_text("test")
    # Make the SHA marker unreadable so the cache check raises OSError.
    sha_file = pr_cache_dir / ".sha"
    sha_file.write_text("abc123def456")
    sha_file.chmod(0o000)
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )
    try:
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
        # Read failure is treated as a cache miss: artifact re-downloaded.
        assert len(aioclient_mock.mock_calls) == 1
        assert "Failed to read cache SHA file" in caplog.text
    finally:
        # Restore permissions so tmp_path cleanup can remove the file.
        sha_file.chmod(0o644)
async def test_pr_download_session_error(
    hass: HomeAssistant,
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling of session creation errors."""
    hass.config.config_dir = str(tmp_path)
    # Failing to create the HTTP session must be logged and must not
    # break frontend setup.
    with patch(
        "homeassistant.components.frontend.pr_download.async_get_clientsession",
        side_effect=RuntimeError("Session error"),
    ):
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
    assert "Failed to download PR #12345" in caplog.text
@pytest.mark.parametrize(
    ("exc", "error_message"),
    [
        (GitHubAuthenticationException("Unauthorized"), "invalid or expired"),
        (GitHubRatelimitException("Rate limit exceeded"), "rate limit"),
        (GitHubPermissionException("Forbidden"), "rate limit"),
        (GitHubNotFoundException("Not found"), "does not exist"),
        (GitHubException("API error"), "api error"),
    ],
)
async def test_pr_download_github_errors(
    hass: HomeAssistant,
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    exc: Exception,
    error_message: str,
) -> None:
    """Test handling of various GitHub API errors."""
    hass.config.config_dir = str(tmp_path)
    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        mock_client = AsyncMock()
        mock_gh_class.return_value = mock_client
        # The very first API call (PR lookup) raises the GitHub error.
        mock_client.generic.side_effect = exc
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
    # Error is translated to a user-oriented message and setup falls back.
    assert error_message in caplog.text.lower()
    assert "Failed to download PR #12345" in caplog.text
@pytest.mark.parametrize(
    ("exc", "error_message"),
    [
        (GitHubAuthenticationException("Unauthorized"), "invalid or expired"),
        (GitHubRatelimitException("Rate limit exceeded"), "rate limit"),
        (GitHubPermissionException("Forbidden"), "rate limit"),
        (GitHubException("API error"), "api error"),
    ],
)
async def test_pr_download_artifact_search_github_errors(
    hass: HomeAssistant,
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    exc: Exception,
    error_message: str,
) -> None:
    """Test handling of GitHub API errors during artifact search."""
    hass.config.config_dir = str(tmp_path)
    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        mock_client = AsyncMock()
        mock_gh_class.return_value = mock_client
        pr_response = AsyncMock()
        pr_response.data = {"head": {"sha": "abc123def456"}}

        # PR lookup succeeds; the subsequent workflow-run query raises.
        async def generic_side_effect(endpoint, **_kwargs):
            if "pulls" in endpoint:
                return pr_response
            raise exc

        mock_client.generic.side_effect = generic_side_effect
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
    assert error_message in caplog.text.lower()
    assert "Failed to download PR #12345" in caplog.text
async def test_pr_download_artifact_not_found(
    hass: HomeAssistant,
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling when artifact is not found."""
    hass.config.config_dir = str(tmp_path)
    with patch(
        "homeassistant.components.frontend.pr_download.GitHubAPI"
    ) as mock_gh_class:
        mock_client = AsyncMock()
        mock_gh_class.return_value = mock_client
        pr_response = AsyncMock()
        pr_response.data = {"head": {"sha": "abc123def456"}}
        # No workflow runs at all -> no artifact can be located.
        workflow_response = AsyncMock()
        workflow_response.data = {"workflow_runs": []}

        async def generic_side_effect(endpoint, **kwargs):
            if "pulls" in endpoint:
                return pr_response
            if "workflows" in endpoint:
                return workflow_response
            raise ValueError(f"Unexpected endpoint: {endpoint}")

        mock_client.generic.side_effect = generic_side_effect
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
    assert "No 'frontend-build' artifact found" in caplog.text
async def test_pr_download_http_error(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling of HTTP download errors."""
    hass.config.config_dir = str(tmp_path)
    # Network-level failure while fetching the artifact archive.
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        exc=ClientError("Download failed"),
    )
    config = {
        DOMAIN: {
            "development_pr": 12345,
            "github_token": "test_token",
        }
    }
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()
    assert "Failed to download PR #12345" in caplog.text
@pytest.mark.parametrize(
    ("status", "error_message"),
    [
        (401, "invalid or expired"),
        (403, "rate limit"),
        (500, "http 500"),
    ],
)
async def test_pr_download_http_status_errors(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
    status: int,
    error_message: str,
) -> None:
    """Test handling of HTTP status errors during artifact download."""
    hass.config.config_dir = str(tmp_path)
    # The archive download responds with an error status; 401/403 get
    # dedicated token/rate-limit messages, others a generic HTTP error.
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        status=status,
    )
    config = {
        DOMAIN: {
            "development_pr": 12345,
            "github_token": "test_token",
        }
    }
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()
    assert error_message in caplog.text.lower()
    assert "Failed to download PR #12345" in caplog.text
async def test_pr_download_timeout_error(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling of timeout during artifact download."""
    hass.config.config_dir = str(tmp_path)
    # Simulate the 60s download timeout firing.
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        exc=TimeoutError("Connection timed out"),
    )
    config = {
        DOMAIN: {
            "development_pr": 12345,
            "github_token": "test_token",
        }
    }
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()
    assert "timeout" in caplog.text.lower()
    assert "Failed to download PR #12345" in caplog.text
async def test_pr_download_bad_zip_file(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling of corrupted zip file."""
    hass.config.config_dir = str(tmp_path)
    # No mock_zipfile fixture here: the real zipfile module rejects this
    # payload with BadZipFile.
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"not a valid zip file",
    )
    config = {
        DOMAIN: {
            "development_pr": 12345,
            "github_token": "test_token",
        }
    }
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()
    assert "Failed to download PR #12345" in caplog.text
    assert "corrupted or invalid" in caplog.text.lower()
async def test_pr_download_zip_bomb_too_many_files(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that zip bombs with too many files are rejected."""
    hass.config.config_dir = str(tmp_path)
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )
    with patch("zipfile.ZipFile") as mock_zip:
        mock_zip_instance = MagicMock()
        mock_info = MagicMock()
        mock_info.file_size = 100
        # 55000 members exceeds the MAX_ZIP_FILES limit (50000).
        mock_zip_instance.infolist.return_value = [mock_info] * 55000
        mock_zip.return_value.__enter__.return_value = mock_zip_instance
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
    assert "Failed to download PR #12345" in caplog.text
    assert "too many files" in caplog.text.lower()
async def test_pr_download_zip_bomb_too_large(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that zip bombs with excessive uncompressed size are rejected."""
    hass.config.config_dir = str(tmp_path)
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )
    with patch("zipfile.ZipFile") as mock_zip:
        mock_zip_instance = MagicMock()
        mock_info = MagicMock()
        # A single 2GB member exceeds the 1.5GB MAX_ZIP_SIZE limit.
        mock_info.file_size = 2 * 1024 * 1024 * 1024  # 2GB per file
        mock_zip_instance.infolist.return_value = [mock_info]
        mock_zip.return_value.__enter__.return_value = mock_zip_instance
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
    assert "Failed to download PR #12345" in caplog.text
    assert "too large" in caplog.text.lower()
async def test_pr_download_extraction_os_error(
    hass: HomeAssistant,
    tmp_path: Path,
    mock_github_api: AsyncMock,
    aioclient_mock: AiohttpClientMocker,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test handling of OS errors during extraction."""
    hass.config.config_dir = str(tmp_path)
    aioclient_mock.get(
        "https://api.github.com/artifact/download",
        content=b"fake zip data",
    )
    with patch("zipfile.ZipFile") as mock_zip:
        mock_zip_instance = MagicMock()
        mock_info = MagicMock()
        mock_info.file_size = 100
        mock_zip_instance.infolist.return_value = [mock_info]
        # Validation passes but extractall fails at the filesystem level.
        mock_zip_instance.extractall.side_effect = OSError("Disk full")
        mock_zip.return_value.__enter__.return_value = mock_zip_instance
        config = {
            DOMAIN: {
                "development_pr": 12345,
                "github_token": "test_token",
            }
        }
        assert await async_setup_component(hass, DOMAIN, config)
        await hass.async_block_till_done()
    assert "Failed to download PR #12345" in caplog.text
    assert "failed to extract" in caplog.text.lower()

View File

@@ -882,6 +882,25 @@ async def test_config_path_with_dir_and_file() -> None:
assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf"
async def test_config_cache_path_with_file() -> None:
    """Test cache_path method with file."""
    hass = Mock()
    hass.data = {}
    config = Config(hass, "/test/ha-config")
    # cache_path prepends the hidden .cache folder inside the config dir.
    assert config.cache_path("test.cache") == "/test/ha-config/.cache/test.cache"
async def test_config_cache_path_with_dir_and_file() -> None:
    """Test cache_path method with dir and file."""
    hass = Mock()
    hass.data = {}
    config = Config(hass, "/test/ha-config")
    # Multiple path segments are joined below .cache, like Config.path.
    assert (
        config.cache_path("dir", "test.cache")
        == "/test/ha-config/.cache/dir/test.cache"
    )
async def test_config_as_dict() -> None:
"""Test as dict."""
hass = Mock()