
Enhance PR download functionality to retrieve both head and base SHAs, and update caching logic accordingly

Author: Wendelin
Date: 2026-02-13 14:50:18 +01:00
Parent: 54141ffd3f
Commit: b42d008e79
3 changed files with 70 additions and 22 deletions
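
For context, the caching scheme after this change can be summarized in a short standalone sketch (not the integration code itself): the PR's head and base SHAs are joined into a single cache key, written to the ".sha" marker file, and compared on the next download, so either new commits on the PR or a rebase onto a newer base invalidates the cache. The helper name cache_is_valid and the paths below are illustrative only.

# Minimal sketch of the "<head>:<base>" cache-key scheme (illustrative only;
# cache_is_valid and the paths are hypothetical, not part of the actual change).
from pathlib import Path


def cache_is_valid(cache_dir: Path, head_sha: str, base_sha: str) -> bool:
    """Return True if the cached artifact matches the current PR SHAs."""
    cache_key = f"{head_sha}:{base_sha}"
    sha_file = cache_dir / ".sha"
    if not sha_file.exists():
        return False
    return sha_file.read_text().strip() == cache_key


# A cache written against an older base commit is treated as stale,
# so rebasing the PR also triggers a re-download.
cache_dir = Path("/tmp/development_artifacts")
cache_dir.mkdir(parents=True, exist_ok=True)
(cache_dir / ".sha").write_text("abc123def456:old_base_sha")
print(cache_is_valid(cache_dir, "abc123def456", "base789abc012"))  # False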

View File

@@ -43,13 +43,13 @@ ERROR_RATE_LIMIT = (
 )
 
 
-async def _get_pr_head_sha(client: GitHubAPI, pr_number: int) -> str:
-    """Get the head SHA for the PR."""
+async def _get_pr_shas(client: GitHubAPI, pr_number: int) -> tuple[str, str]:
+    """Get the head and base SHAs for a PR."""
     try:
         response = await client.generic(
             endpoint=f"/repos/home-assistant/frontend/pulls/{pr_number}",
         )
-        return str(response.data["head"]["sha"])
+        return str(response.data["head"]["sha"]), str(response.data["base"]["sha"])
     except GitHubAuthenticationException as err:
         raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
     except (GitHubRatelimitException, GitHubPermissionException) as err:
@@ -137,9 +137,9 @@ async def _download_artifact_data(
 def _extract_artifact(
     artifact_data: bytes,
     cache_dir: pathlib.Path,
-    head_sha: str,
+    cache_key: str,
 ) -> None:
-    """Extract artifact and save SHA (runs in executor)."""
+    """Extract artifact and save cache key (runs in executor)."""
     frontend_dir = cache_dir / "hass_frontend"
     if cache_dir.exists():
@@ -163,9 +163,8 @@ def _extract_artifact(
     )
     zip_file.extractall(str(frontend_dir))
 
-    # Save the commit SHA for cache validation
     sha_file = cache_dir / ".sha"
-    sha_file.write_text(head_sha)
+    sha_file.write_text(cache_key)
 
 
 async def download_pr_artifact(
@@ -186,15 +185,16 @@ async def download_pr_artifact(
     client = GitHubAPI(token=github_token, session=session)
-    head_sha = await _get_pr_head_sha(client, pr_number)
+    head_sha, base_sha = await _get_pr_shas(client, pr_number)
+    cache_key = f"{head_sha}:{base_sha}"
 
     frontend_dir = tmp_dir / "hass_frontend"
     sha_file = tmp_dir / ".sha"
 
     if frontend_dir.exists() and sha_file.exists():
         try:
-            cached_sha = await hass.async_add_executor_job(sha_file.read_text)
-            if cached_sha.strip() == head_sha:
+            cached_key = await hass.async_add_executor_job(sha_file.read_text)
+            if cached_key.strip() == cache_key:
                 _LOGGER.info(
                     "Using cached PR #%s (commit %s) from %s",
                     pr_number,
@@ -203,9 +203,9 @@ async def download_pr_artifact(
                 )
                 return tmp_dir
             _LOGGER.info(
-                "PR #%s has new commits (cached: %s, current: %s), re-downloading",
+                "PR #%s cache outdated (cached: %s, current: %s), re-downloading",
                 pr_number,
-                cached_sha[:8],
+                cached_key[:8],
                 head_sha[:8],
             )
         except OSError as err:
@@ -218,7 +218,7 @@ async def download_pr_artifact(
     try:
         await hass.async_add_executor_job(
-            _extract_artifact, artifact_data, tmp_dir, head_sha
+            _extract_artifact, artifact_data, tmp_dir, cache_key
        )
    except zipfile.BadZipFile as err:
        raise HomeAssistantError(

View File

@@ -19,7 +19,10 @@ def mock_github_api() -> Generator[AsyncMock]:
 
     # Mock PR response
     pr_response = AsyncMock()
-    pr_response.data = {"head": {"sha": "abc123def456"}}
+    pr_response.data = {
+        "head": {"sha": "abc123def456"},
+        "base": {"sha": "base789abc012"},
+    }
 
     # Mock workflow runs response
     workflow_response = AsyncMock()

View File

@@ -64,7 +64,7 @@ async def test_pr_download_uses_cache(
     frontend_dir = pr_cache_dir / "hass_frontend"
     frontend_dir.mkdir(parents=True)
     (frontend_dir / "index.html").write_text("test")
-    (pr_cache_dir / ".sha").write_text("abc123def456")
+    (pr_cache_dir / ".sha").write_text("abc123def456:base789abc012")
 
     with patch(
         "homeassistant.components.frontend.pr_download.GitHubAPI"
@@ -73,7 +73,10 @@ async def test_pr_download_uses_cache(
         mock_gh_class.return_value = mock_client
 
         pr_response = AsyncMock()
-        pr_response.data = {"head": {"sha": "abc123def456"}}
+        pr_response.data = {
+            "head": {"sha": "abc123def456"},
+            "base": {"sha": "base789abc012"},
+        }
         mock_client.generic.return_value = pr_response
 
         config = {
@@ -93,21 +96,21 @@ async def test_pr_download_uses_cache(
     assert "pulls" in str(calls[0])
 
 
-async def test_pr_download_cache_invalidated(
+async def test_pr_download_cache_invalidated_head_changed(
     hass: HomeAssistant,
     tmp_path: Path,
     mock_github_api,
     aioclient_mock: AiohttpClientMocker,
     mock_zipfile,
 ) -> None:
-    """Test that cache is invalidated when commit changes."""
+    """Test that cache is invalidated when head commit changes."""
     hass.config.config_dir = str(tmp_path)
 
     pr_cache_dir = tmp_path / ".cache" / "frontend" / "development_artifacts"
     frontend_dir = pr_cache_dir / "hass_frontend"
     frontend_dir.mkdir(parents=True)
     (frontend_dir / "index.html").write_text("test")
-    (pr_cache_dir / ".sha").write_text("old_commit_sha")
+    (pr_cache_dir / ".sha").write_text("old_head_sha:base789abc012")
 
     aioclient_mock.get(
         "https://api.github.com/artifact/download",
@@ -124,7 +127,43 @@ async def test_pr_download_cache_invalidated_head_changed(
     assert await async_setup_component(hass, DOMAIN, config)
     await hass.async_block_till_done()
 
-    # Should download - commit changed
+    # Should download - head commit changed
     assert len(aioclient_mock.mock_calls) == 1
 
 
+async def test_pr_download_cache_invalidated_base_changed(
+    hass: HomeAssistant,
+    tmp_path: Path,
+    mock_github_api,
+    aioclient_mock: AiohttpClientMocker,
+    mock_zipfile,
+) -> None:
+    """Test that cache is invalidated when base branch changes."""
+    hass.config.config_dir = str(tmp_path)
+
+    pr_cache_dir = tmp_path / ".cache" / "frontend" / "development_artifacts"
+    frontend_dir = pr_cache_dir / "hass_frontend"
+    frontend_dir.mkdir(parents=True)
+    (frontend_dir / "index.html").write_text("test")
+    # Same head SHA but different base SHA
+    (pr_cache_dir / ".sha").write_text("abc123def456:old_base_sha")
+
+    aioclient_mock.get(
+        "https://api.github.com/artifact/download",
+        content=b"fake zip data",
+    )
+
+    config = {
+        DOMAIN: {
+            "development_pr": 12345,
+            "github_token": "test_token",
+        }
+    }
+
+    assert await async_setup_component(hass, DOMAIN, config)
+    await hass.async_block_till_done()
+
+    # Should download - base branch changed
+    assert len(aioclient_mock.mock_calls) == 1
@@ -261,7 +300,10 @@ async def test_pr_download_artifact_search_github_errors(
         mock_gh_class.return_value = mock_client
 
         pr_response = AsyncMock()
-        pr_response.data = {"head": {"sha": "abc123def456"}}
+        pr_response.data = {
+            "head": {"sha": "abc123def456"},
+            "base": {"sha": "base789abc012"},
+        }
 
         async def generic_side_effect(endpoint, **_kwargs):
             if "pulls" in endpoint:
@@ -299,7 +341,10 @@ async def test_pr_download_artifact_not_found(
         mock_gh_class.return_value = mock_client
 
         pr_response = AsyncMock()
-        pr_response.data = {"head": {"sha": "abc123def456"}}
+        pr_response.data = {
+            "head": {"sha": "abc123def456"},
+            "base": {"sha": "base789abc012"},
+        }
 
         workflow_response = AsyncMock()
         workflow_response.data = {"workflow_runs": []}