mirror of https://github.com/home-assistant/supervisor.git synced 2026-02-15 07:27:13 +00:00

Migrate export image to aiodocker (#6534)

* Migrate export image to aiodocker

* Remove aiofiles and just use executor

* Fixes from feedback

Mike Degatano, committed by GitHub, 2026-02-11 04:03:50 -05:00
parent 7e91cfe01c
commit 825ff415e0
6 changed files with 108 additions and 72 deletions
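
In short, the export no longer pulls docker-py's blocking image stream and writes it synchronously (and it drops the aiofiles approach from the first iteration): the image is consumed as an async stream from aiodocker, and only the individual blocking file writes are handed to an executor. The standalone sketch below illustrates that pattern; it is not Supervisor code. The helper name export_image_to_tar and the CHUNK_SIZE constant are illustrative, and aiodocker's images.export_image() is used the same way the diff below uses it (as an async context manager yielding a stream with iter_chunked()).

# Illustrative sketch only -- not Supervisor code.
import asyncio
from pathlib import Path

import aiodocker

CHUNK_SIZE = 2**16  # 64 KiB, mirroring the DEFAULT_CHUNK_SIZE constant added in this commit


async def export_image_to_tar(image_ref: str, tar_file: Path) -> None:
    """Stream a Docker image export to tar_file without blocking the event loop."""
    loop = asyncio.get_running_loop()
    docker = aiodocker.Docker()
    # Open the target file in the default executor so the loop never blocks on disk I/O.
    writer = await loop.run_in_executor(None, tar_file.open, "wb")
    try:
        # Assumed usage of aiodocker, matching the diff below: export_image() as an
        # async context manager whose body yields chunks via iter_chunked().
        async with docker.images.export_image(image_ref) as content:
            async for chunk in content.iter_chunked(CHUNK_SIZE):
                # Each blocking write also goes through the executor.
                await loop.run_in_executor(None, writer.write, chunk)
    finally:
        await loop.run_in_executor(None, writer.close)
        await docker.close()


if __name__ == "__main__":
    asyncio.run(export_image_to_tar("alpine:latest", Path("image.tar")))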


@@ -1316,65 +1316,59 @@ class Addon(AddonModel):
         """
 
         def _addon_backup(
-            store_image: bool,
             metadata: dict[str, Any],
             apparmor_profile: str | None,
             addon_config_used: bool,
+            temp_dir: TemporaryDirectory,
+            temp_path: Path,
         ):
             """Start the backup process."""
-            with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
-                temp_path = Path(temp)
-
-                # store local image
-                if store_image:
-                    try:
-                        self.instance.export_image(temp_path.joinpath("image.tar"))
-                    except DockerError as err:
-                        raise BackupRestoreUnknownError() from err
-
             # Store local configs/state
             try:
                 write_json_file(temp_path.joinpath("addon.json"), metadata)
             except ConfigurationFileError as err:
                 _LOGGER.error("Can't save meta for %s: %s", self.slug, err)
                 raise BackupRestoreUnknownError() from err
 
             # Store AppArmor Profile
             if apparmor_profile:
                 profile_backup_file = temp_path.joinpath("apparmor.txt")
                 try:
                     self.sys_host.apparmor.backup_profile(
                         apparmor_profile, profile_backup_file
                     )
                 except HostAppArmorError as err:
+                    _LOGGER.error(
+                        "Can't backup AppArmor profile for %s: %s", self.slug, err
+                    )
                     raise BackupRestoreUnknownError() from err
 
             # Write tarfile
             with tar_file as backup:
                 # Backup metadata
-                backup.add(temp, arcname=".")
+                backup.add(temp_dir.name, arcname=".")
 
                 # Backup data
                 atomic_contents_add(
                     backup,
                     self.path_data,
                     file_filter=partial(
                         self._is_excluded_by_filter, self.path_data, "data"
                     ),
                     arcname="data",
                 )
 
                 # Backup config (if used and existing, restore handles this gracefully)
                 if addon_config_used and self.path_config.is_dir():
                     atomic_contents_add(
                         backup,
                         self.path_config,
                         file_filter=partial(
                             self._is_excluded_by_filter, self.path_config, "config"
                         ),
                         arcname="config",
                     )
 
         wait_for_start: asyncio.Task | None = None
 
         data = {
@@ -1388,22 +1382,35 @@ class Addon(AddonModel):
             )
         was_running = await self.begin_backup()
 
+        temp_dir = await self.sys_run_in_executor(
+            TemporaryDirectory, dir=self.sys_config.path_tmp
+        )
+        temp_path = Path(temp_dir.name)
+        _LOGGER.info("Building backup for add-on %s", self.slug)
         try:
-            _LOGGER.info("Building backup for add-on %s", self.slug)
+            # store local image
+            if self.need_build:
+                await self.instance.export_image(temp_path.joinpath("image.tar"))
+
             await self.sys_run_in_executor(
                 partial(
                     _addon_backup,
-                    store_image=self.need_build,
                     metadata=data,
                     apparmor_profile=apparmor_profile,
                     addon_config_used=self.addon_config_used,
+                    temp_dir=temp_dir,
+                    temp_path=temp_path,
                 )
             )
             _LOGGER.info("Finish backup for addon %s", self.slug)
+        except DockerError as err:
+            _LOGGER.error("Can't export image for addon %s: %s", self.slug, err)
+            raise BackupRestoreUnknownError() from err
         except (tarfile.TarError, OSError, AddFileError) as err:
             _LOGGER.error("Can't write backup tarfile for addon %s: %s", self.slug, err)
             raise BackupRestoreUnknownError() from err
         finally:
+            await self.sys_run_in_executor(temp_dir.cleanup)
             if was_running:
                 wait_for_start = await self.end_backup()


@@ -4,7 +4,7 @@ from __future__ import annotations
 import asyncio
 import errno
-from io import IOBase
+from io import BufferedWriter
 import logging
 from pathlib import Path
 import re
@@ -44,6 +44,7 @@ from ..const import (
     ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
+    DEFAULT_CHUNK_SIZE,
     REQUEST_FROM,
 )
 from ..coresys import CoreSysAttributes
@@ -480,14 +481,14 @@ class APIBackups(CoreSysAttributes):
         tmp_path = await self.sys_backups.get_upload_path_for_location(location)
         temp_dir: TemporaryDirectory | None = None
-        backup_file_stream: IOBase | None = None
+        backup_file_stream: BufferedWriter | None = None
 
-        def open_backup_file() -> Path:
+        def open_backup_file() -> tuple[Path, BufferedWriter]:
             nonlocal temp_dir, backup_file_stream
             temp_dir = TemporaryDirectory(dir=tmp_path.as_posix())
             tar_file = Path(temp_dir.name, "upload.tar")
             backup_file_stream = tar_file.open("wb")
-            return tar_file
+            return (tar_file, backup_file_stream)
 
         def close_backup_file() -> None:
             if backup_file_stream:
@@ -503,12 +504,10 @@
         if not isinstance(contents, BodyPartReader):
             raise APIError("Improperly formatted upload, could not read backup")
 
-        tar_file = await self.sys_run_in_executor(open_backup_file)
-        while chunk := await contents.read_chunk(size=2**16):
-            await self.sys_run_in_executor(
-                cast(IOBase, backup_file_stream).write, chunk
-            )
-        await self.sys_run_in_executor(cast(IOBase, backup_file_stream).close)
+        tar_file, backup_writer = await self.sys_run_in_executor(open_backup_file)
+        while chunk := await contents.read_chunk(size=DEFAULT_CHUNK_SIZE):
+            await self.sys_run_in_executor(backup_writer.write, chunk)
+        await self.sys_run_in_executor(backup_writer.close)
 
         backup = await asyncio.shield(
             self.sys_backups.import_backup(


@@ -414,6 +414,9 @@ ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_AD
 OBSERVER_PORT = 4357
 
+# Used for stream operations
+DEFAULT_CHUNK_SIZE = 2**16  # 64KiB
+
 
 class AddonBootConfig(StrEnum):
     """Boot mode config for the add-on."""

@@ -779,14 +779,11 @@ class DockerAddon(DockerInterface):
         _LOGGER.info("Build %s:%s done", self.image, version)
 
-    def export_image(self, tar_file: Path) -> None:
-        """Export current images into a tar file.
-
-        Must be run in executor.
-        """
+    async def export_image(self, tar_file: Path) -> None:
+        """Export current images into a tar file."""
         if not self.image:
             raise RuntimeError("Cannot export without image!")
 
-        self.sys_docker.export_image(self.image, self.version, tar_file)
+        await self.sys_docker.export_image(self.image, self.version, tar_file)
 
     @Job(
         name="docker_addon_import_image",

@@ -6,8 +6,10 @@ import asyncio
 from collections.abc import Mapping
 from contextlib import suppress
 from dataclasses import dataclass
+import errno
 from functools import partial
 from http import HTTPStatus
+from io import BufferedWriter
 from ipaddress import IPv4Address
 import json
 import logging
@@ -31,6 +33,7 @@ from ..const import (
     ATTR_ENABLE_IPV6,
     ATTR_MTU,
     ATTR_REGISTRIES,
+    DEFAULT_CHUNK_SIZE,
     DNS_SUFFIX,
     DOCKER_NETWORK,
     ENV_SUPERVISOR_CPU_RT,
@@ -47,6 +50,7 @@ from ..exceptions import (
     DockerNotFound,
     DockerRequestError,
 )
+from ..resolution.const import UnhealthyReason
 from ..utils.common import FileConfiguration
 from ..validate import SCHEMA_DOCKER_CONFIG
 from .const import (
@@ -1054,24 +1058,35 @@ class DockerAPI(CoreSysAttributes):
                 f"Could not inspect imported image due to: {err!s}", _LOGGER.error
             ) from err
 
-    def export_image(self, image: str, version: AwesomeVersion, tar_file: Path) -> None:
+    async def export_image(
+        self, image: str, version: AwesomeVersion, tar_file: Path
+    ) -> None:
         """Export current images into a tar file."""
-        try:
-            docker_image = self.dockerpy.api.get_image(f"{image}:{version}")
-        except (docker_errors.DockerException, requests.RequestException) as err:
-            raise DockerError(
-                f"Can't fetch image {image}: {err}", _LOGGER.error
-            ) from err
-
-        _LOGGER.info("Exporting image %s to %s", image, tar_file)
+        image_tar_stream: BufferedWriter | None = None
+        _LOGGER.info("Export image %s to %s", image, tar_file)
         try:
-            with tar_file.open("wb") as write_tar:
-                for chunk in docker_image:
-                    write_tar.write(chunk)
-        except (OSError, requests.RequestException) as err:
+            image_tar_stream = image_writer = cast(
+                BufferedWriter, await self.sys_run_in_executor(tar_file.open, "wb")
+            )
+            async with self.images.export_image(f"{image}:{version}") as content:
+                async for chunk in content.iter_chunked(DEFAULT_CHUNK_SIZE):
+                    await self.sys_run_in_executor(image_writer.write, chunk)
+        except aiodocker.DockerError as err:
+            raise DockerError(
+                f"Can't fetch image {image}:{version}: {err}", _LOGGER.error
+            ) from err
+        except OSError as err:
+            if err.errno == errno.EBADMSG:
+                self.sys_resolution.add_unhealthy_reason(
+                    UnhealthyReason.OSERROR_BAD_MESSAGE
+                )
             raise DockerError(
                 f"Can't write tar file {tar_file}: {err}", _LOGGER.error
             ) from err
+        finally:
+            if image_tar_stream:
+                await self.sys_run_in_executor(image_tar_stream.close)
 
         _LOGGER.info("Export image %s done", image)


@@ -196,6 +196,21 @@ async def docker() -> DockerAPI:
     )
     docker_images.pull.return_value = AsyncIterator([{}])
 
+    # Export image mocking
+    class MockCM:
+        def __init__(self):
+            self.content = [b""]
+
+        async def __aenter__(self):
+            out = MagicMock()
+            out.iter_chunked.return_value = AsyncIterator(self.content)
+            return out
+
+        async def __aexit__(self, exc_type, exc, tb):
+            return None
+
+    docker_images.export_image.return_value = MockCM()
+
     # Containers mocking
     docker_containers.get.return_value = docker_container = MagicMock(
         spec=DockerContainer, id=container_inspect["Id"]