mirror of
https://github.com/home-assistant/core.git
synced 2025-12-19 18:38:58 +00:00
SFTP/SSH as remote Backup location (#135844)
Co-authored-by: Josef Zweck <josef@zweck.dev> Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
This commit is contained in:
@@ -460,6 +460,7 @@ homeassistant.components.sensorpush_cloud.*
|
||||
homeassistant.components.sensoterra.*
|
||||
homeassistant.components.senz.*
|
||||
homeassistant.components.sfr_box.*
|
||||
homeassistant.components.sftp_storage.*
|
||||
homeassistant.components.shell_command.*
|
||||
homeassistant.components.shelly.*
|
||||
homeassistant.components.shopping_list.*
|
||||
|
||||
2
CODEOWNERS
generated
2
CODEOWNERS
generated
@@ -1394,6 +1394,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/seventeentrack/ @shaiu
|
||||
/homeassistant/components/sfr_box/ @epenet
|
||||
/tests/components/sfr_box/ @epenet
|
||||
/homeassistant/components/sftp_storage/ @maretodoric
|
||||
/tests/components/sftp_storage/ @maretodoric
|
||||
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
|
||||
/tests/components/sharkiq/ @JeffResc @funkybunch
|
||||
/homeassistant/components/shell_command/ @home-assistant/core
|
||||
|
||||
155
homeassistant/components/sftp_storage/__init__.py
Normal file
155
homeassistant/components/sftp_storage/__init__.py
Normal file
@@ -0,0 +1,155 @@
|
||||
"""Integration for SFTP Storage."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
from dataclasses import dataclass, field
|
||||
import errno
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from homeassistant.components.backup import BackupAgentError
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError
|
||||
|
||||
from .client import BackupAgentClient
|
||||
from .const import (
|
||||
CONF_BACKUP_LOCATION,
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_PRIVATE_KEY_FILE,
|
||||
CONF_USERNAME,
|
||||
DATA_BACKUP_AGENT_LISTENERS,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
|
||||
# Config entries of this integration carry their parsed data in
# `entry.runtime_data` as an `SFTPConfigEntryData`.
type SFTPConfigEntry = ConfigEntry[SFTPConfigEntryData]


@dataclass(kw_only=True)
class SFTPConfigEntryData:
    """Dataclass holding all config entry data for an SFTP Storage entry."""

    host: str
    port: int
    username: str
    # repr=False keeps the secret out of log lines that print this dataclass.
    password: str | None = field(repr=False)
    private_key_file: str | None
    backup_location: str
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: SFTPConfigEntry) -> bool:
    """Set up SFTP Storage from a config entry.

    Builds the runtime configuration, verifies connectivity to the remote
    SSH/SFTP server once, and notifies the backup integration whenever this
    entry changes state so agents are re-discovered.
    """

    cfg = SFTPConfigEntryData(
        host=entry.data[CONF_HOST],
        port=entry.data[CONF_PORT],
        username=entry.data[CONF_USERNAME],
        password=entry.data.get(CONF_PASSWORD),
        # NOTE(review): the `[]` fallback does not match the declared
        # `str | None` type of `private_key_file`.  Presumably it is meant
        # to keep asyncssh from probing default client keys when no key was
        # uploaded — confirm before changing it to `None`.
        private_key_file=entry.data.get(CONF_PRIVATE_KEY_FILE, []),
        backup_location=entry.data[CONF_BACKUP_LOCATION],
    )
    entry.runtime_data = cfg

    # Establish a connection during setup.
    # This will raise exception if there is something wrong with either
    # SSH server or config.
    try:
        client = BackupAgentClient(entry, hass)
        await client.open()
    except BackupAgentError as e:
        raise ConfigEntryError from e

    # Notify backup listeners
    def _async_notify_backup_listeners() -> None:
        # Each registered listener triggers a refresh of available agents.
        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
            listener()

    entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))

    return True
|
||||
|
||||
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: SFTPConfigEntry) -> None:
    """Remove an SFTP Storage config entry.

    If a private key file was uploaded for this entry, delete it from the
    integration's storage directory, then remove that directory if it is
    empty afterwards.  All filesystem work runs in the executor.
    """

    def remove_files(entry: SFTPConfigEntry) -> None:
        """Blocking cleanup of the stored private key file and its directory."""
        pkey = Path(entry.data[CONF_PRIVATE_KEY_FILE])

        if pkey.exists():
            LOGGER.debug(
                "Removing private key (%s) for %s integration for host %s@%s",
                pkey,
                DOMAIN,
                entry.data[CONF_USERNAME],
                entry.data[CONF_HOST],
            )
            try:
                pkey.unlink()
            except OSError as e:
                # Best-effort: log and continue so entry removal still succeeds.
                LOGGER.warning(
                    "Failed to remove private key %s for %s integration for host %s@%s. %s",
                    pkey.name,
                    DOMAIN,
                    entry.data[CONF_USERNAME],
                    entry.data[CONF_HOST],
                    str(e),
                )

        try:
            pkey.parent.rmdir()
        except OSError as e:
            if e.errno == errno.ENOTEMPTY:  # Directory not empty
                # Only gather the directory listing when debug logging is on.
                if LOGGER.isEnabledFor(logging.DEBUG):
                    leftover_files = []
                    # If we get an exception while gathering leftover files, make sure to log plain message.
                    with contextlib.suppress(OSError):
                        leftover_files = [f.name for f in pkey.parent.iterdir()]

                    LOGGER.debug(
                        "Storage directory for %s integration is not empty (%s)%s",
                        DOMAIN,
                        str(pkey.parent),
                        f", files: {', '.join(leftover_files)}"
                        if leftover_files
                        else "",
                    )
            else:
                LOGGER.warning(
                    "Error occurred while removing directory %s for integration %s: %s at host %s@%s",
                    str(pkey.parent),
                    DOMAIN,
                    str(e),
                    entry.data[CONF_USERNAME],
                    entry.data[CONF_HOST],
                )
        else:
            # Bug fix: the format string previously contained a single %s but
            # was given three arguments, which makes the logging module emit
            # a "not all arguments converted" formatting error instead of
            # this message.  The placeholders now match the arguments.
            LOGGER.debug(
                "Removed storage directory for %s integration for host %s@%s",
                DOMAIN,
                entry.data[CONF_USERNAME],
                entry.data[CONF_HOST],
            )

    if bool(entry.data.get(CONF_PRIVATE_KEY_FILE)):
        LOGGER.debug(
            "Cleaning up after %s integration for host %s@%s",
            DOMAIN,
            entry.data[CONF_USERNAME],
            entry.data[CONF_HOST],
        )
        await hass.async_add_executor_job(remove_files, entry)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: SFTPConfigEntry) -> bool:
    """Unload SFTP Storage config entry."""
    # Nothing to tear down — connections are opened per-operation — so
    # unloading is just a debug trace plus success.
    username = entry.data[CONF_USERNAME]
    host = entry.data[CONF_HOST]
    LOGGER.debug("Unloading %s integration for host %s@%s", DOMAIN, username, host)
    return True
|
||||
153
homeassistant/components/sftp_storage/backup.py
Normal file
153
homeassistant/components/sftp_storage/backup.py
Normal file
@@ -0,0 +1,153 @@
|
||||
"""Backup platform for the SFTP Storage integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine
|
||||
from typing import Any
|
||||
|
||||
from asyncssh.sftp import SFTPError
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
AgentBackup,
|
||||
BackupAgent,
|
||||
BackupAgentError,
|
||||
BackupNotFound,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from . import SFTPConfigEntry
|
||||
from .client import BackupAgentClient
|
||||
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN, LOGGER
|
||||
|
||||
|
||||
async def async_get_backup_agents(
    hass: HomeAssistant,
) -> list[BackupAgent]:
    """Register the backup agents."""
    # One agent per currently-loaded config entry of this integration.
    return [
        SFTPBackupAgent(hass, entry)
        for entry in hass.config_entries.async_loaded_entries(DOMAIN)
    ]
|
||||
|
||||
|
||||
@callback
def async_register_backup_agents_listener(
    hass: HomeAssistant,
    *,
    listener: Callable[[], None],
    **kwargs: Any,
) -> Callable[[], None]:
    """Register a listener to be called when agents are added or removed."""
    listeners = hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, [])
    listeners.append(listener)

    @callback
    def remove_listener() -> None:
        """Remove the listener."""
        listeners.remove(listener)
        # Drop the hass.data key entirely once the last listener is gone.
        if not listeners:
            del hass.data[DATA_BACKUP_AGENT_LISTENERS]

    return remove_listener
|
||||
|
||||
|
||||
class SFTPBackupAgent(BackupAgent):
    """SFTP Backup Storage agent.

    Each method opens a fresh `BackupAgentClient` connection for the
    duration of the operation; no connection is kept between calls.
    """

    domain = DOMAIN

    def __init__(self, hass: HomeAssistant, entry: SFTPConfigEntry) -> None:
        """Initialize the SFTPBackupAgent backup sync agent."""
        super().__init__()
        self._entry: SFTPConfigEntry = entry
        self._hass: HomeAssistant = hass
        # Agent display name and unique id mirror the config entry.
        self.name: str = entry.title
        self.unique_id: str = entry.entry_id

    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file from SFTP.

        Returns an async byte iterator over the remote archive.
        Raises `BackupNotFound` if the metadata or archive for
        `backup_id` does not exist on the remote server.
        """
        LOGGER.debug(
            "Establishing SFTP connection to remote host in order to download backup id: %s",
            backup_id,
        )
        try:
            # Will raise BackupAgentError if failure to authenticate or SFTP Permissions
            async with BackupAgentClient(self._entry, self._hass) as client:
                # The iterator manages its own connection, so closing this
                # client here does not invalidate the returned stream.
                return await client.iter_file(backup_id)
        except FileNotFoundError as e:
            raise BackupNotFound(
                f"Unable to initiate download of backup id: {backup_id}. {e}"
            ) from e

    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup."""
        LOGGER.debug("Received request to upload backup: %s", backup)
        iterator = await open_stream()

        LOGGER.debug(
            "Establishing SFTP connection to remote host in order to upload backup"
        )

        # Will raise BackupAgentError if failure to authenticate or SFTP Permissions
        async with BackupAgentClient(self._entry, self._hass) as client:
            LOGGER.debug("Uploading backup: %s", backup.backup_id)
            await client.async_upload_backup(iterator, backup)
            LOGGER.debug("Successfully uploaded backup id: %s", backup.backup_id)

    async def async_delete_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> None:
        """Delete a backup file from SFTP Storage.

        Raises `BackupNotFound` if the backup does not exist and
        `BackupAgentError` on remote SFTP failures.
        """
        LOGGER.debug("Received request to delete backup id: %s", backup_id)

        try:
            LOGGER.debug(
                "Establishing SFTP connection to remote host in order to delete backup"
            )
            # Will raise BackupAgentError if failure to authenticate or SFTP Permissions
            async with BackupAgentClient(self._entry, self._hass) as client:
                await client.async_delete_backup(backup_id)
        except FileNotFoundError as err:
            raise BackupNotFound(str(err)) from err
        except SFTPError as err:
            raise BackupAgentError(
                f"Failed to delete backup id: {backup_id}: {err}"
            ) from err

        LOGGER.debug("Successfully removed backup id: %s", backup_id)

    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups stored on SFTP Storage."""

        # Will raise BackupAgentError if failure to authenticate or SFTP Permissions
        async with BackupAgentClient(self._entry, self._hass) as client:
            try:
                return await client.async_list_backups()
            except SFTPError as err:
                raise BackupAgentError(
                    f"Remote server error while attempting to list backups: {err}"
                ) from err

    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup:
        """Return a backup.

        Implemented as a linear scan over `async_list_backups`;
        raises `BackupNotFound` when no backup matches `backup_id`.
        """
        backups = await self.async_list_backups()

        for backup in backups:
            if backup.backup_id == backup_id:
                LOGGER.debug("Returning backup id: %s. %s", backup_id, backup)
                return backup

        raise BackupNotFound(f"Backup id: {backup_id} not found")
|
||||
311
homeassistant/components/sftp_storage/client.py
Normal file
311
homeassistant/components/sftp_storage/client.py
Normal file
@@ -0,0 +1,311 @@
|
||||
"""Client for SFTP Storage integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncIterator
|
||||
from dataclasses import dataclass
|
||||
import json
|
||||
from types import TracebackType
|
||||
from typing import TYPE_CHECKING, Self
|
||||
|
||||
from asyncssh import (
|
||||
SFTPClient,
|
||||
SFTPClientFile,
|
||||
SSHClientConnection,
|
||||
SSHClientConnectionOptions,
|
||||
connect,
|
||||
)
|
||||
from asyncssh.misc import PermissionDenied
|
||||
from asyncssh.sftp import SFTPNoSuchFile, SFTPPermissionDenied
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
AgentBackup,
|
||||
BackupAgentError,
|
||||
suggested_filename,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import BUF_SIZE, LOGGER
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import SFTPConfigEntry, SFTPConfigEntryData
|
||||
|
||||
|
||||
def get_client_options(cfg: SFTPConfigEntryData) -> SSHClientConnectionOptions:
    """Use this function with `hass.async_add_executor_job` to asynchronously get `SSHClientConnectionOptions`."""
    # Host-key verification is disabled (known_hosts=None); authentication
    # uses the configured password and/or private key file.
    options = SSHClientConnectionOptions(
        known_hosts=None,
        client_keys=cfg.private_key_file,
        username=cfg.username,
        password=cfg.password,
    )
    return options
|
||||
|
||||
|
||||
class AsyncFileIterator:
    """Returns iterator of remote file located in SFTP Server.

    This exists in order to properly close remote file after operation is completed
    and to avoid premature closing of file and session if `BackupAgentClient` is used
    as context manager.
    """

    _client: BackupAgentClient
    _fileobj: SFTPClientFile

    def __init__(
        self,
        cfg: SFTPConfigEntry,
        hass: HomeAssistant,
        file_path: str,
        buffer_size: int = BUF_SIZE,
    ) -> None:
        """Initialize `AsyncFileIterator`.

        The SSH/SFTP connection is NOT opened here; it is created lazily on
        the first `__anext__` call so the iterator owns its own session.
        """
        self.cfg: SFTPConfigEntry = cfg
        self.hass: HomeAssistant = hass
        self.file_path: str = file_path
        self.buffer_size = buffer_size
        # Flips to True once `_initialize` has opened the session and file.
        self._initialized: bool = False
        LOGGER.debug("Opening file: %s in Async File Iterator", file_path)

    async def _initialize(self) -> None:
        """Load file object."""
        # Dedicated client so the caller's context-managed client can close
        # without invalidating this stream.
        self._client: BackupAgentClient = await BackupAgentClient(
            self.cfg, self.hass
        ).open()
        self._fileobj: SFTPClientFile = await self._client.sftp.open(
            self.file_path, "rb"
        )

        self._initialized = True

    def __aiter__(self) -> AsyncIterator[bytes]:
        """Return self as iterator."""
        return self

    async def __anext__(self) -> bytes:
        """Return next bytes as provided in buffer size."""
        if not self._initialized:
            await self._initialize()

        chunk: bytes = await self._fileobj.read(self.buffer_size)
        if not chunk:
            # End of file: close the remote file and the session, then stop.
            # The `finally` guarantees StopAsyncIteration even if cleanup fails.
            try:
                await self._fileobj.close()
                await self._client.close()
            finally:
                raise StopAsyncIteration
        return chunk
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
class BackupMetadata:
    """Represent single backup file metadata."""

    # Remote path of the backup archive on the SFTP server.
    file_path: str
    # Serialized `AgentBackup` dictionary as written by `async_upload_backup`.
    metadata: dict[str, str | dict[str, list[str]]]
    # Remote path of the `.{backup_id}.metadata.json` file this was loaded from.
    metadata_file: str
|
||||
|
||||
|
||||
class BackupAgentClient:
    """Helper class that manages SSH and SFTP Server connections."""

    # Assigned in `open()`.  NOTE(review): before a successful `open()` this
    # attribute does not exist, so `__aexit__`/`close()` would raise
    # AttributeError on `if self.sftp:` — confirm they are only reached
    # after `open()` succeeded.
    sftp: SFTPClient

    def __init__(self, config: SFTPConfigEntry, hass: HomeAssistant) -> None:
        """Initialize `BackupAgentClient`."""
        self.cfg: SFTPConfigEntry = config
        self.hass: HomeAssistant = hass
        self._ssh: SSHClientConnection | None = None
        LOGGER.debug("Initialized with config: %s", self.cfg.runtime_data)

    async def __aenter__(self) -> Self:
        """Async context manager entrypoint."""

        return await self.open()  # type: ignore[return-value] # mypy will otherwise raise an error

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """Async Context Manager exit routine."""
        # Shut down the SFTP subsystem first, then the SSH connection.
        if self.sftp:
            self.sftp.exit()
            await self.sftp.wait_closed()

        if self._ssh:
            self._ssh.close()

            await self._ssh.wait_closed()

    async def _load_metadata(self, backup_id: str) -> BackupMetadata:
        """Return `BackupMetadata` object`.

        Raises:
        ------
        `FileNotFoundError` -- if metadata file is not found.

        """

        # Test for metadata file existence.
        # Metadata lives in a hidden `.{backup_id}.metadata.json` file next
        # to the archive in the configured backup location.
        metadata_file = (
            f"{self.cfg.runtime_data.backup_location}/.{backup_id}.metadata.json"
        )
        if not await self.sftp.exists(metadata_file):
            raise FileNotFoundError(
                f"Metadata file not found at remote location: {metadata_file}"
            )

        async with self.sftp.open(metadata_file, "r") as f:
            return BackupMetadata(
                **json.loads(await f.read()), metadata_file=metadata_file
            )

    async def async_delete_backup(self, backup_id: str) -> None:
        """Delete backup archive.

        Raises:
        ------
        `FileNotFoundError` -- if either metadata file or archive is not found.

        """

        metadata: BackupMetadata = await self._load_metadata(backup_id)

        # If for whatever reason, archive does not exist but metadata file does,
        # remove the metadata file.
        if not await self.sftp.exists(metadata.file_path):
            await self.sftp.unlink(metadata.metadata_file)
            raise FileNotFoundError(
                f"File at provided remote location: {metadata.file_path} does not exist."
            )

        LOGGER.debug("Removing file at path: %s", metadata.file_path)
        await self.sftp.unlink(metadata.file_path)
        LOGGER.debug("Removing metadata at path: %s", metadata.metadata_file)
        await self.sftp.unlink(metadata.metadata_file)

    async def async_list_backups(self) -> list[AgentBackup]:
        """Iterate through a list of metadata files and return a list of `AgentBackup` objects."""

        backups: list[AgentBackup] = []

        for file in await self.list_backup_location():
            LOGGER.debug(
                "Evaluating metadata file at remote location: %s@%s:%s",
                self.cfg.runtime_data.username,
                self.cfg.runtime_data.host,
                file,
            )

            try:
                async with self.sftp.open(file, "r") as rfile:
                    metadata = BackupMetadata(
                        **json.loads(await rfile.read()), metadata_file=file
                    )
                    backups.append(AgentBackup.from_dict(metadata.metadata))
            except (json.JSONDecodeError, TypeError) as e:
                # A corrupt metadata file is skipped, not fatal for listing.
                LOGGER.error(
                    "Failed to load backup metadata from file: %s. %s", file, str(e)
                )
                continue

        return backups

    async def async_upload_backup(
        self,
        iterator: AsyncIterator[bytes],
        backup: AgentBackup,
    ) -> None:
        """Accept `iterator` as bytes iterator and write backup archive to SFTP Server."""

        file_path = (
            f"{self.cfg.runtime_data.backup_location}/{suggested_filename(backup)}"
        )
        async with self.sftp.open(file_path, "wb") as f:
            async for b in iterator:
                await f.write(b)

        LOGGER.debug("Writing backup metadata")
        # The companion metadata file records where the archive was written
        # plus the serialized AgentBackup; `_load_metadata` reads it back.
        metadata: dict[str, str | dict[str, list[str]]] = {
            "file_path": file_path,
            "metadata": backup.as_dict(),
        }
        async with self.sftp.open(
            f"{self.cfg.runtime_data.backup_location}/.{backup.backup_id}.metadata.json",
            "w",
        ) as f:
            await f.write(json.dumps(metadata))

    async def close(self) -> None:
        """Close the `BackupAgentClient` context manager."""
        await self.__aexit__(None, None, None)

    async def iter_file(self, backup_id: str) -> AsyncFileIterator:
        """Return Async File Iterator object.

        `SFTPClientFile` object (that would be returned with `sftp.open`) is not an iterator.
        So we return custom made class - `AsyncFileIterator` that would allow iteration on file object.

        Raises:
        ------
        - `FileNotFoundError` -- if metadata or backup archive is not found.

        """

        metadata: BackupMetadata = await self._load_metadata(backup_id)
        if not await self.sftp.exists(metadata.file_path):
            raise FileNotFoundError("Backup archive not found on remote location.")
        # The iterator opens its own connection, so it survives this client.
        return AsyncFileIterator(self.cfg, self.hass, metadata.file_path, BUF_SIZE)

    async def list_backup_location(self) -> list[str]:
        """Return a list of `*.metadata.json` files located in backup location."""
        files = []
        LOGGER.debug(
            "Changing directory to: `%s`", self.cfg.runtime_data.backup_location
        )
        await self.sftp.chdir(self.cfg.runtime_data.backup_location)

        for file in await self.sftp.listdir():
            LOGGER.debug(
                "Checking if file: `%s/%s` is metadata file",
                self.cfg.runtime_data.backup_location,
                file,
            )
            if file.endswith(".metadata.json"):
                LOGGER.debug("Found metadata file: `%s`", file)
                files.append(f"{self.cfg.runtime_data.backup_location}/{file}")
        return files

    async def open(self) -> BackupAgentClient:
        """Return initialized `BackupAgentClient`.

        This is to avoid calling `__aenter__` dunder method.

        Raises `BackupAgentError` when the SSH connection or the SFTP
        subsystem cannot be established.
        """

        # Configure SSH Client Connection
        try:
            self._ssh = await connect(
                host=self.cfg.runtime_data.host,
                port=self.cfg.runtime_data.port,
                # Building SSHClientConnectionOptions does blocking key I/O,
                # so it runs in the executor.
                options=await self.hass.async_add_executor_job(
                    get_client_options, self.cfg.runtime_data
                ),
            )
        except (OSError, PermissionDenied) as e:
            raise BackupAgentError(
                "Failure while attempting to establish SSH connection. Please check SSH credentials and if changed, re-install the integration"
            ) from e

        # Configure SFTP Client Connection
        try:
            self.sftp = await self._ssh.start_sftp_client()
            # chdir doubles as a check that the backup location is reachable.
            await self.sftp.chdir(self.cfg.runtime_data.backup_location)
        except (SFTPNoSuchFile, SFTPPermissionDenied) as e:
            raise BackupAgentError(
                "Failed to create SFTP client. Re-installing integration might be required"
            ) from e

        return self
|
||||
236
homeassistant/components/sftp_storage/config_flow.py
Normal file
236
homeassistant/components/sftp_storage/config_flow.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""Config flow to configure the SFTP Storage integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
from typing import Any, cast
|
||||
|
||||
from asyncssh import KeyImportError, SSHClientConnectionOptions, connect
|
||||
from asyncssh.misc import PermissionDenied
|
||||
from asyncssh.sftp import SFTPNoSuchFile, SFTPPermissionDenied
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.file_upload import process_uploaded_file
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.selector import (
|
||||
FileSelector,
|
||||
FileSelectorConfig,
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
from homeassistant.helpers.storage import STORAGE_DIR
|
||||
from homeassistant.util.ulid import ulid
|
||||
|
||||
from . import SFTPConfigEntryData
|
||||
from .client import get_client_options
|
||||
from .const import (
|
||||
CONF_BACKUP_LOCATION,
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_PRIVATE_KEY_FILE,
|
||||
CONF_USERNAME,
|
||||
DEFAULT_PKEY_NAME,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
|
||||
# Schema for the single "user" step of the config flow.  Password and
# private key are both optional here; the flow later enforces that at
# least one of them is supplied.
DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_PORT, default=22): int,
        vol.Required(CONF_USERNAME): str,
        vol.Optional(CONF_PASSWORD): TextSelector(
            config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
        ),
        vol.Optional(CONF_PRIVATE_KEY_FILE): FileSelector(
            FileSelectorConfig(accept="*")
        ),
        vol.Required(CONF_BACKUP_LOCATION): str,
    }
)
|
||||
|
||||
|
||||
class SFTPStorageException(Exception):
    """Base exception for SFTP Storage integration."""


class SFTPStorageInvalidPrivateKey(SFTPStorageException):
    """Exception raised during config flow - when user provided invalid private key file."""


class SFTPStorageMissingPasswordOrPkey(SFTPStorageException):
    """Exception raised during config flow - when user did not provide password or private key file."""
|
||||
|
||||
|
||||
class SFTPFlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle an SFTP Storage config flow."""

    def __init__(self) -> None:
        """Initialize SFTP Storage Flow Handler."""
        self._client_keys: list = []

    async def _validate_auth_and_save_keyfile(
        self, user_input: dict[str, Any]
    ) -> dict[str, Any]:
        """Validate authentication input and persist uploaded key file.

        Ensures that at least one of password or private key is provided. When a
        private key is supplied, the uploaded file is saved to Home Assistant's
        config storage and `user_input[CONF_PRIVATE_KEY_FILE]` is replaced with
        the stored path.

        Returns: the possibly updated `user_input`.

        Raises:
        - SFTPStorageMissingPasswordOrPkey: Neither password nor private key provided
        - SFTPStorageInvalidPrivateKey: The provided private key has an invalid format
        """

        # If neither password nor private key is provided, error out;
        # we need at least one to perform authentication.
        if not (user_input.get(CONF_PASSWORD) or user_input.get(CONF_PRIVATE_KEY_FILE)):
            raise SFTPStorageMissingPasswordOrPkey

        if key_file := user_input.get(CONF_PRIVATE_KEY_FILE):
            client_key = await save_uploaded_pkey_file(self.hass, cast(str, key_file))

            LOGGER.debug("Saved client key: %s", client_key)
            user_input[CONF_PRIVATE_KEY_FILE] = client_key

        return user_input

    async def async_step_user(
        self,
        user_input: dict[str, Any] | None = None,
        step_id: str = "user",
    ) -> ConfigFlowResult:
        """Handle a flow initiated by the user."""
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}

        if user_input is not None:
            LOGGER.debug("Source: %s", self.source)

            # Abort if an entry for the same host/port/location already exists.
            self._async_abort_entries_match(
                {
                    CONF_HOST: user_input[CONF_HOST],
                    CONF_PORT: user_input[CONF_PORT],
                    CONF_BACKUP_LOCATION: user_input[CONF_BACKUP_LOCATION],
                }
            )

            try:
                # Validate auth input and save uploaded key file if provided
                user_input = await self._validate_auth_and_save_keyfile(user_input)

                # Create a session using your credentials
                user_config = SFTPConfigEntryData(
                    host=user_input[CONF_HOST],
                    port=user_input[CONF_PORT],
                    username=user_input[CONF_USERNAME],
                    password=user_input.get(CONF_PASSWORD),
                    private_key_file=user_input.get(CONF_PRIVATE_KEY_FILE),
                    backup_location=user_input[CONF_BACKUP_LOCATION],
                )

                placeholders["backup_location"] = user_config.backup_location

                # Raises:
                # - OSError, if host or port are not correct.
                # - SFTPStorageInvalidPrivateKey, if private key is not valid format.
                # - asyncssh.misc.PermissionDenied, if credentials are not correct.
                # - SFTPStorageMissingPasswordOrPkey, if password and private key are not provided.
                # - asyncssh.sftp.SFTPNoSuchFile, if directory does not exist.
                # - asyncssh.sftp.SFTPPermissionDenied, if we don't have access to said directory
                async with (
                    connect(
                        host=user_config.host,
                        port=user_config.port,
                        options=await self.hass.async_add_executor_job(
                            get_client_options, user_config
                        ),
                    ) as ssh,
                    ssh.start_sftp_client() as sftp,
                ):
                    # Confirm the backup location exists and is listable.
                    await sftp.chdir(user_config.backup_location)
                    await sftp.listdir()

                # Bug fix: the arguments were previously passed as
                # (host, username), rendering "host@username" despite the
                # "user@host" wording; they now match the message.
                LOGGER.debug(
                    "Will register SFTP Storage agent with user@host %s@%s",
                    user_config.username,
                    user_config.host,
                )

            except OSError as e:
                LOGGER.exception(e)
                placeholders["error_message"] = str(e)
                errors["base"] = "os_error"
            except SFTPStorageInvalidPrivateKey:
                errors["base"] = "invalid_key"
            except PermissionDenied as e:
                placeholders["error_message"] = str(e)
                errors["base"] = "permission_denied"
            except SFTPStorageMissingPasswordOrPkey:
                errors["base"] = "key_or_password_needed"
            except SFTPNoSuchFile:
                errors["base"] = "sftp_no_such_file"
            except SFTPPermissionDenied:
                errors["base"] = "sftp_permission_denied"
            except Exception as e:  # noqa: BLE001
                LOGGER.exception(e)
                placeholders["error_message"] = str(e)
                placeholders["exception"] = type(e).__name__
                errors["base"] = "unknown"
            else:
                return self.async_create_entry(
                    title=f"{user_config.username}@{user_config.host}",
                    data=user_input,
                )
            finally:
                # We remove the saved private key file if any error occurred.
                if errors and bool(user_input.get(CONF_PRIVATE_KEY_FILE)):
                    keyfile = Path(user_input[CONF_PRIVATE_KEY_FILE])
                    keyfile.unlink(missing_ok=True)
                    with suppress(OSError):
                        keyfile.parent.rmdir()

        if user_input:
            # Never echo the stored key path back into the re-shown form.
            user_input.pop(CONF_PRIVATE_KEY_FILE, None)

        return self.async_show_form(
            step_id=step_id,
            data_schema=self.add_suggested_values_to_schema(DATA_SCHEMA, user_input),
            description_placeholders=placeholders,
            errors=errors,
        )
|
||||
|
||||
|
||||
async def save_uploaded_pkey_file(hass: HomeAssistant, uploaded_file_id: str) -> str:
    """Validate the uploaded private key and move it to the storage directory.

    Return a string representing a path to private key file.
    Raises SFTPStorageInvalidPrivateKey if the file is invalid.
    """

    def _process_upload() -> str:
        with process_uploaded_file(hass, uploaded_file_id) as file_path:
            try:
                # Initializing this will verify if private key is in correct format
                SSHClientConnectionOptions(client_keys=[file_path])
            except KeyImportError as err:
                LOGGER.debug(err)
                raise SFTPStorageInvalidPrivateKey from err

            # Move the validated key into .storage/sftp_storage under a
            # unique, hidden filename.
            storage_dir = Path(hass.config.path(STORAGE_DIR, DOMAIN))
            storage_dir.mkdir(exist_ok=True)
            target = storage_dir / f".{ulid()}_{DEFAULT_PKEY_NAME}"
            return str(shutil.move(file_path, target))

    return await hass.async_add_executor_job(_process_upload)
|
||||
27
homeassistant/components/sftp_storage/const.py
Normal file
27
homeassistant/components/sftp_storage/const.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""Constants for the SFTP Storage integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DOMAIN: Final = "sftp_storage"
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
|
||||
CONF_HOST: Final = "host"
|
||||
CONF_PORT: Final = "port"
|
||||
CONF_USERNAME: Final = "username"
|
||||
CONF_PASSWORD: Final = "password"
|
||||
CONF_PRIVATE_KEY_FILE: Final = "private_key_file"
|
||||
CONF_BACKUP_LOCATION: Final = "backup_location"
|
||||
|
||||
BUF_SIZE = 2**20 * 4 # 4MB
|
||||
|
||||
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
|
||||
f"{DOMAIN}.backup_agent_listeners"
|
||||
)
|
||||
DEFAULT_PKEY_NAME: str = "sftp_storage_pkey"
|
||||
13
homeassistant/components/sftp_storage/manifest.json
Normal file
13
homeassistant/components/sftp_storage/manifest.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"domain": "sftp_storage",
|
||||
"name": "SFTP Storage",
|
||||
"after_dependencies": ["backup"],
|
||||
"codeowners": ["@maretodoric"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["file_upload"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/sftp_storage",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["asyncssh==2.21.0"]
|
||||
}
|
||||
140
homeassistant/components/sftp_storage/quality_scale.yaml
Normal file
140
homeassistant/components/sftp_storage/quality_scale.yaml
Normal file
@@ -0,0 +1,140 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: No actions.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: No polling.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: No actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
entity-unique-id:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
has-entity-name:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: No configuration options.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: No actions and no entities.
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
This backup storage integration uses static SFTP credentials that do not expire
|
||||
or require token refresh. Authentication failures indicate configuration issues
|
||||
that should be resolved by reconfiguring the integration.
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration connects to a single service.
|
||||
diagnostics:
|
||||
status: exempt
|
||||
comment: |
|
||||
There is no data to diagnose.
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration connects to a user-configured SFTP server and does not support discovery.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration connects to a user-configured SFTP server and does not support discovery.
|
||||
docs-data-update:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not poll or push.
|
||||
docs-examples:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration only serves backup.
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration connects to a user-configured SFTP server, not a physical device.
|
||||
docs-supported-functions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not have entities.
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration connects to a single service.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not have entities.
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not have entities.
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not have entities.
|
||||
entity-translations:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not have entities.
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not have entities.
|
||||
reconfiguration-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
This backup storage integration's configuration consists of static SFTP
|
||||
connection parameters (host, port, credentials, backup path). Changes to
|
||||
these parameters effectively create a connection to a different backup
|
||||
location, which should be configured as a separate integration instance.
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration provides backup storage functionality only. Connection
|
||||
failures are handled through config entry setup errors and do not require
|
||||
persistent repair issues. Users can resolve authentication or connectivity
|
||||
problems by reconfiguring the integration through the config flow.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration connects to a single service.
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
||||
37
homeassistant/components/sftp_storage/strings.json
Normal file
37
homeassistant/components/sftp_storage/strings.json
Normal file
@@ -0,0 +1,37 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Set up SFTP Storage",
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"port": "[%key:common::config_flow::data::port%]",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"private_key_file": "Private key file",
|
||||
"backup_location": "Remote path"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "Hostname or IP address of SSH/SFTP server to connect to.",
|
||||
"port": "Port of your SSH/SFTP server. This is usually 22.",
|
||||
"username": "Username to authenticate with.",
|
||||
"password": "Password to authenticate with. Provide this or private key file.",
|
||||
"private_key_file": "Upload private key file used for authentication. Provide this or password.",
|
||||
"backup_location": "Remote path where to upload backups."
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"invalid_key": "Invalid key uploaded. Please make sure key corresponds to valid SSH key algorithm.",
|
||||
"key_or_password_needed": "Please configure password or private key file location for SFTP Storage.",
|
||||
"os_error": "{error_message}. Please check if host and/or port are correct.",
|
||||
"permission_denied": "{error_message}",
|
||||
"sftp_no_such_file": "Could not check directory {backup_location}. Make sure directory exists.",
|
||||
"sftp_permission_denied": "Permission denied for directory {backup_location}",
|
||||
"unknown": "Unexpected exception ({exception}) occurred during config flow. {error_message}"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "Integration already configured. Host with same address, port and backup location already exists."
|
||||
}
|
||||
}
|
||||
}
|
||||
1
homeassistant/generated/config_flows.py
generated
1
homeassistant/generated/config_flows.py
generated
@@ -570,6 +570,7 @@ FLOWS = {
|
||||
"senz",
|
||||
"seventeentrack",
|
||||
"sfr_box",
|
||||
"sftp_storage",
|
||||
"sharkiq",
|
||||
"shelly",
|
||||
"shopping_list",
|
||||
|
||||
@@ -5872,6 +5872,12 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
"sftp_storage": {
|
||||
"name": "SFTP Storage",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
"sharkiq": {
|
||||
"name": "Shark IQ",
|
||||
"integration_type": "hub",
|
||||
|
||||
10
mypy.ini
generated
10
mypy.ini
generated
@@ -4356,6 +4356,16 @@ disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.sftp_storage.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_subclassing_any = true
|
||||
disallow_untyped_calls = true
|
||||
disallow_untyped_decorators = true
|
||||
disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.shell_command.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
|
||||
3
requirements_all.txt
generated
3
requirements_all.txt
generated
@@ -550,6 +550,9 @@ asyncpysupla==0.0.5
|
||||
# homeassistant.components.sleepiq
|
||||
asyncsleepiq==1.6.0
|
||||
|
||||
# homeassistant.components.sftp_storage
|
||||
asyncssh==2.21.0
|
||||
|
||||
# homeassistant.components.aten_pe
|
||||
# atenpdu==0.3.2
|
||||
|
||||
|
||||
3
requirements_test_all.txt
generated
3
requirements_test_all.txt
generated
@@ -508,6 +508,9 @@ asyncarve==0.1.1
|
||||
# homeassistant.components.sleepiq
|
||||
asyncsleepiq==1.6.0
|
||||
|
||||
# homeassistant.components.sftp_storage
|
||||
asyncssh==2.21.0
|
||||
|
||||
# homeassistant.components.aurora
|
||||
auroranoaa==0.0.5
|
||||
|
||||
|
||||
1
tests/components/sftp_storage/__init__.py
Normal file
1
tests/components/sftp_storage/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Tests SFTP Storage integration."""
|
||||
139
tests/components/sftp_storage/asyncssh_mock.py
Normal file
139
tests/components/sftp_storage/asyncssh_mock.py
Normal file
@@ -0,0 +1,139 @@
|
||||
"""Mock classes for asyncssh module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from typing import Self
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from asyncssh.misc import async_context_manager
|
||||
|
||||
|
||||
class SSHClientConnectionMock:
    """Mock of an asyncssh SSH client connection."""

    def __init__(self, *args, **kwargs) -> None:
        """Create the mock with an attached mocked SFTP client."""
        self._sftp: SFTPClientMock = SFTPClientMock()

    async def __aenter__(self) -> Self:
        """Enter the async context manager."""
        return self

    async def __aexit__(self, *args) -> None:
        """Exit the async context manager, closing the connection."""
        self.close()

    def close(self):
        """Mock `close` from `SSHClientConnection`."""
        return

    def mock_setup_backup(self, metadata: dict, with_bad: bool = False) -> str:
        """Prime the SFTP mocks so one backup can be listed and downloaded.

        Return: Backup ID (slug)
        """
        slug = metadata["metadata"]["backup_id"]
        listing = [f"{slug}.metadata.json"]
        reads = [
            json.dumps(metadata),  # from async_list_backups
            json.dumps(metadata),  # from iter_file -> _load_metadata
            b"backup data",  # from AsyncFileIterator
            b"",
        ]

        if with_bad:
            # Prepend a metadata file whose contents are not valid JSON.
            listing.insert(0, "invalid.metadata.json")
            reads.insert(0, "invalid")

        self._sftp._mock_listdir.return_value = listing
        self._sftp._mock_open._mock_read.side_effect = reads
        return slug

    @async_context_manager
    async def start_sftp_client(self, *args, **kwargs) -> SFTPClientMock:
        """Return mocked SFTP Client."""
        return self._sftp

    async def wait_closed(self):
        """Mock `wait_closed` from `SFTPClient`."""
        return
|
||||
|
||||
|
||||
class SFTPClientMock:
    """Mock of an asyncssh SFTP client."""

    def __init__(self, *args, **kwargs) -> None:
        """Create the AsyncMocks that record every SFTP call."""
        self._mock_chdir = AsyncMock()
        self._mock_listdir = AsyncMock()
        self._mock_exists = AsyncMock(return_value=True)
        self._mock_unlink = AsyncMock()
        self._mock_open = SFTPOpenMock()

    async def __aenter__(self) -> Self:
        """Enter the async context manager."""
        return self

    async def __aexit__(self, *args) -> None:
        """Exit the async context manager."""
        self.exit()

    async def chdir(self, *args) -> None:
        """Mock `chdir` method from SFTPClient."""
        await self._mock_chdir(*args)

    async def listdir(self, *args) -> list[str]:
        """Mock `listdir` method from SFTPClient."""
        entries = await self._mock_listdir(*args)
        if entries is None:
            # Unconfigured mock: behave like an empty directory.
            return []
        return entries

    @async_context_manager
    async def open(self, *args, **kwargs) -> SFTPOpenMock:
        """Mock open a remote file."""
        return self._mock_open

    async def exists(self, *args) -> bool:
        """Mock `exists` method from SFTPClient."""
        return await self._mock_exists(*args)

    async def unlink(self, *args) -> None:
        """Mock `unlink` method from SFTPClient."""
        await self._mock_unlink(*args)

    def exit(self):
        """Mandatory method for quitting SFTP Client."""
        return

    async def wait_closed(self):
        """Mock `wait_closed` from `SFTPClient`."""
        return
|
||||
|
||||
|
||||
class SFTPOpenMock:
    """Mock of a remote file handle returned by `SFTPClient.open`."""

    def __init__(self) -> None:
        """Set up the AsyncMocks backing read/write/close."""
        self._mock_read = AsyncMock(return_value=b"")
        self._mock_write = AsyncMock()
        self.close = AsyncMock(return_value=None)

    async def __aenter__(self):
        """Enter the async context manager."""
        return self

    async def __aexit__(self, *args) -> None:
        """Exit the async context manager."""

    async def read(self, *args, **kwargs) -> bytes:
        """Return the mocked file contents from `self._mock_read`."""
        return await self._mock_read(*args, **kwargs)

    async def write(self, content, *args, **kwargs) -> int:
        """Record the written payload and report its full length."""
        await self._mock_write(content, *args, **kwargs)
        return len(content)
|
||||
155
tests/components/sftp_storage/conftest.py
Normal file
155
tests/components/sftp_storage/conftest.py
Normal file
@@ -0,0 +1,155 @@
|
||||
"""PyTest fixtures and test helpers."""
|
||||
|
||||
from collections.abc import Awaitable, Callable, Generator
|
||||
from contextlib import contextmanager, suppress
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from asyncssh import generate_private_key
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.backup import DOMAIN as BACKUP_DOMAIN, AgentBackup
|
||||
from homeassistant.components.sftp_storage import SFTPConfigEntryData
|
||||
from homeassistant.components.sftp_storage.const import (
|
||||
CONF_BACKUP_LOCATION,
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_PRIVATE_KEY_FILE,
|
||||
CONF_USERNAME,
|
||||
DEFAULT_PKEY_NAME,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.storage import STORAGE_DIR
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util.ulid import ulid
|
||||
|
||||
from .asyncssh_mock import SSHClientConnectionMock, async_context_manager
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
type ComponentSetup = Callable[[], Awaitable[None]]
|
||||
|
||||
BACKUP_METADATA = {
|
||||
"file_path": "backup_location/backup.tar",
|
||||
"metadata": {
|
||||
"addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}],
|
||||
"backup_id": "test-backup",
|
||||
"date": "2025-01-01T01:23:45.687000+01:00",
|
||||
"database_included": True,
|
||||
"extra_metadata": {
|
||||
"instance_id": 1,
|
||||
"with_automatic_settings": False,
|
||||
"supervisor.backup_request_date": "2025-01-01T01:23:45.687000+01:00",
|
||||
},
|
||||
"folders": [],
|
||||
"homeassistant_included": True,
|
||||
"homeassistant_version": "2024.12.0",
|
||||
"name": "Test",
|
||||
"protected": True,
|
||||
"size": 1234,
|
||||
},
|
||||
}
|
||||
TEST_AGENT_BACKUP = AgentBackup.from_dict(BACKUP_METADATA["metadata"])
|
||||
|
||||
CONFIG_ENTRY_TITLE = "testsshuser@127.0.0.1"
|
||||
PRIVATE_KEY_FILE_UUID = "0123456789abcdef0123456789abcdef"
|
||||
USER_INPUT = {
|
||||
CONF_HOST: "127.0.0.1",
|
||||
CONF_PORT: 22,
|
||||
CONF_USERNAME: "username",
|
||||
CONF_PASSWORD: "password",
|
||||
CONF_PRIVATE_KEY_FILE: PRIVATE_KEY_FILE_UUID,
|
||||
CONF_BACKUP_LOCATION: "backup_location",
|
||||
}
|
||||
TEST_AGENT_ID = ulid()
|
||||
|
||||
|
||||
@contextmanager
def private_key_file(hass: HomeAssistant) -> Generator[str]:
    """Yield the path of a generated private key in integration storage."""
    storage_dir = Path(hass.config.path(STORAGE_DIR, DOMAIN))
    key_path = storage_dir / f".{ulid()}_{DEFAULT_PKEY_NAME}"
    key_path.parent.mkdir(parents=True, exist_ok=True)

    # Generate and persist the key only if it is not already on disk.
    if not key_path.exists():
        key_path.write_bytes(
            generate_private_key("ssh-rsa").export_private_key("pkcs8-pem")
        )

    yield str(key_path)

    # Best-effort cleanup of the key and, if now empty, its directory.
    if key_path.exists():
        key_path.unlink(missing_ok=True)
        with suppress(OSError):
            key_path.parent.rmdir()
|
||||
|
||||
|
||||
@pytest.fixture(name="setup_integration")
async def mock_setup_integration(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    mock_ssh_connection: SSHClientConnectionMock,
) -> ComponentSetup:
    """Return a callable that sets up the backup component and this entry."""
    config_entry.add_to_hass(hass)

    async def func(config_entry: MockConfigEntry = config_entry) -> None:
        """Load the backup integration, then set up the config entry."""
        assert await async_setup_component(hass, BACKUP_DOMAIN, {})
        await hass.config_entries.async_setup(config_entry.entry_id)

    return func
|
||||
|
||||
|
||||
@pytest.fixture(name="config_entry")
def mock_config_entry(hass: HomeAssistant) -> Generator[MockConfigEntry]:
    """Yield a MockConfigEntry backed by an on-disk private key file."""

    # pylint: disable-next=contextmanager-generator-missing-cleanup
    with private_key_file(hass) as private_key:
        entry_data = {
            CONF_HOST: "127.0.0.1",
            CONF_PORT: 22,
            CONF_USERNAME: "username",
            CONF_PASSWORD: "password",
            CONF_PRIVATE_KEY_FILE: str(private_key),
            CONF_BACKUP_LOCATION: "backup_location",
        }
        config_entry = MockConfigEntry(
            domain=DOMAIN,
            entry_id=TEST_AGENT_ID,
            unique_id=TEST_AGENT_ID,
            title=CONFIG_ENTRY_TITLE,
            data=entry_data,
        )

        # Mirror what async_setup_entry would attach at runtime.
        config_entry.runtime_data = SFTPConfigEntryData(**config_entry.data)
        yield config_entry
|
||||
|
||||
|
||||
@pytest.fixture
def mock_ssh_connection():
    """Patch asyncssh `connect` in both modules to return one shared mock."""
    connection = SSHClientConnectionMock()

    # We decorate from same decorator from asyncssh
    # It makes the callable an awaitable and context manager.
    @async_context_manager
    async def fake_connect(*args, **kwargs):
        """Mock the asyncssh.connect function to return our mock directly."""
        return connection

    with (
        patch(
            "homeassistant.components.sftp_storage.client.connect",
            side_effect=fake_connect,
        ),
        patch(
            "homeassistant.components.sftp_storage.config_flow.connect",
            side_effect=fake_connect,
        ),
    ):
        yield connection
|
||||
418
tests/components/sftp_storage/test_backup.py
Normal file
418
tests/components/sftp_storage/test_backup.py
Normal file
@@ -0,0 +1,418 @@
|
||||
"""Test the Backup SFTP Location platform."""
|
||||
|
||||
from io import StringIO
|
||||
import json
|
||||
from typing import Any
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from asyncssh.sftp import SFTPError
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.sftp_storage.backup import (
|
||||
async_register_backup_agents_listener,
|
||||
)
|
||||
from homeassistant.components.sftp_storage.const import (
|
||||
DATA_BACKUP_AGENT_LISTENERS,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .asyncssh_mock import SSHClientConnectionMock
|
||||
from .conftest import (
|
||||
BACKUP_METADATA,
|
||||
CONFIG_ENTRY_TITLE,
|
||||
TEST_AGENT_BACKUP,
|
||||
TEST_AGENT_ID,
|
||||
ComponentSetup,
|
||||
)
|
||||
|
||||
from tests.typing import ClientSessionGenerator, WebSocketGenerator
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
async def mock_setup_integration(
    setup_integration: ComponentSetup,
) -> None:
    """Set up the integration automatically before every backup test."""
    await setup_integration()
|
||||
|
||||
|
||||
def generate_result(metadata: dict) -> dict:
    """Build the backup/info WS payload expected for the given metadata."""
    result: dict = metadata["metadata"].copy()
    # protected/size move under the per-agent section of the payload.
    result["agents"] = {
        f"{DOMAIN}.{TEST_AGENT_ID}": {
            "protected": result.pop("protected"),
            "size": result.pop("size"),
        }
    }
    # Fields the backup manager adds for backups with no recorded failures.
    result["failed_addons"] = []
    result["failed_agent_ids"] = []
    result["failed_folders"] = []
    result["with_automatic_settings"] = None
    return result
|
||||
|
||||
|
||||
async def test_agents_info(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
) -> None:
    """Test backup agent info before and after unloading the config entry."""
    client = await hass_ws_client(hass)

    await client.send_json_auto_id({"type": "backup/agents/info"})
    response = await client.receive_json()

    assert response["success"]
    assert response["result"] == {
        "agents": [
            {"agent_id": "backup.local", "name": "local"},
            {"agent_id": f"{DOMAIN}.{TEST_AGENT_ID}", "name": CONFIG_ENTRY_TITLE},
        ],
    }

    # Unloading the entry must unregister its backup agent.
    config_entry = hass.config_entries.async_entries(DOMAIN)[0]
    await hass.config_entries.async_unload(config_entry.entry_id)

    await client.send_json_auto_id({"type": "backup/agents/info"})
    response = await client.receive_json()

    assert response["success"]
    # The previous `A or state == NOT_LOADED` assertion was vacuous: once the
    # unload succeeds the state clause is always true, so the agent list was
    # never actually checked. Assert both facts independently instead.
    assert config_entry.state is ConfigEntryState.NOT_LOADED
    assert response["result"] == {
        "agents": [{"agent_id": "backup.local", "name": "local"}]
    }
|
||||
|
||||
|
||||
async def test_agents_list_backups(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test agent list backups."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)

    ws_client = await hass_ws_client(hass)
    await ws_client.send_json_auto_id({"type": "backup/info"})
    response = await ws_client.receive_json()

    assert response["success"]
    result = response["result"]
    assert result["agent_errors"] == {}
    assert result["backups"] == [generate_result(BACKUP_METADATA)]
|
||||
|
||||
|
||||
async def test_agents_list_backups_fail(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test agent list backups fails."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)
    # Reading any metadata file now raises an SFTP error.
    mock_ssh_connection._sftp._mock_open._mock_read.side_effect = SFTPError(
        2, "Error message"
    )

    ws_client = await hass_ws_client(hass)
    await ws_client.send_json_auto_id({"type": "backup/info"})
    response = await ws_client.receive_json()

    assert response["success"]
    result = response["result"]
    assert result["backups"] == []
    assert result["agent_errors"] == {
        f"{DOMAIN}.{TEST_AGENT_ID}": "Remote server error while attempting to list backups: Error message"
    }
|
||||
|
||||
|
||||
async def test_agents_list_backups_include_bad_metadata(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test agent list backups."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA, with_bad=True)

    ws_client = await hass_ws_client(hass)
    await ws_client.send_json_auto_id({"type": "backup/info"})
    response = await ws_client.receive_json()

    assert response["success"]
    result = response["result"]
    assert result["agent_errors"] == {}
    # The unparseable metadata file is skipped; only the good backup remains.
    assert result["backups"] == [generate_result(BACKUP_METADATA)]
    # Called two times, one for bad backup metadata and once for good
    assert mock_ssh_connection._sftp._mock_open._mock_read.call_count == 2
    assert (
        "Failed to load backup metadata from file: backup_location/invalid.metadata.json. Expecting value: line 1 column 1 (char 0)"
        in caplog.messages
    )
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("backup_id", "expected_result"),
    [
        (TEST_AGENT_BACKUP.backup_id, generate_result(BACKUP_METADATA)),
        ("12345", None),
    ],
    ids=["found", "not_found"],
)
async def test_agents_get_backup(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    backup_id: str,
    expected_result: dict[str, Any] | None,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test agent get backup."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)

    ws_client = await hass_ws_client(hass)
    await ws_client.send_json_auto_id(
        {"type": "backup/details", "backup_id": backup_id}
    )
    response = await ws_client.receive_json()

    assert response["success"]
    assert response["result"]["backup"] == expected_result
|
||||
|
||||
|
||||
async def test_agents_download(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test agent download backup."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)
    http_client = await hass_client()

    resp = await http_client.get(
        f"/api/backup/download/{TEST_AGENT_BACKUP.backup_id}?agent_id={DOMAIN}.{TEST_AGENT_ID}"
    )

    assert resp.status == 200
    assert await resp.content.read() == b"backup data"
    # The remote file handle must be closed after streaming.
    mock_ssh_connection._sftp._mock_open.close.assert_awaited()
|
||||
|
||||
|
||||
async def test_agents_download_fail(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test agent download backup fails."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)
    http_client = await hass_client()
    download_url = f"/api/backup/download/{TEST_AGENT_BACKUP.backup_id}?agent_id={DOMAIN}.{TEST_AGENT_ID}"

    # This will cause `FileNotFoundError` exception in `BackupAgentClient.iter_file() method.`
    mock_ssh_connection._sftp._mock_exists.side_effect = [True, False]
    resp = await http_client.get(download_url)
    assert resp.status == 404

    # This will raise `RuntimeError` causing Internal Server Error, mimicking that the SFTP setup failed.
    mock_ssh_connection._sftp = None
    resp = await http_client.get(download_url)
    assert resp.status == 500
    assert b"Internal Server Error" in await resp.content.read()
|
||||
|
||||
|
||||
async def test_agents_download_metadata_not_found(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test agent download backup raises error if not found."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)
    # No metadata file exists on the remote side.
    mock_ssh_connection._sftp._mock_exists.return_value = False

    http_client = await hass_client()
    resp = await http_client.get(
        f"/api/backup/download/{TEST_AGENT_BACKUP.backup_id}?agent_id={DOMAIN}.{TEST_AGENT_ID}"
    )

    assert resp.status == 404
    assert (await resp.content.read()).decode() == ""
|
||||
|
||||
|
||||
async def test_agents_upload(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    caplog: pytest.LogCaptureFixture,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test agent upload backup."""
    http_client = await hass_client()

    with patch(
        "homeassistant.components.backup.manager.read_backup",
        return_value=TEST_AGENT_BACKUP,
    ):
        resp = await http_client.post(
            f"/api/backup/upload?agent_id={DOMAIN}.{TEST_AGENT_ID}",
            data={"file": StringIO("test")},
        )

    assert resp.status == 201
    assert f"Uploading backup: {TEST_AGENT_BACKUP.backup_id}" in caplog.text
    assert (
        f"Successfully uploaded backup id: {TEST_AGENT_BACKUP.backup_id}" in caplog.text
    )

    write_mock = mock_ssh_connection._sftp._mock_open._mock_write
    # Called write 2 times
    # 1. When writing backup file
    # 2. When writing metadata file
    assert write_mock.call_count == 2

    # This is 'backup file'
    assert b"test" in write_mock.call_args_list[0].args

    # This is backup metadata
    uploaded_metadata = json.loads(write_mock.call_args_list[1].args[0])["metadata"]
    assert uploaded_metadata == BACKUP_METADATA["metadata"]
|
||||
|
||||
|
||||
async def test_agents_upload_fail(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    caplog: pytest.LogCaptureFixture,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test agent upload backup fails."""
    http_client = await hass_client()
    # Any remote write now raises an SFTP error.
    mock_ssh_connection._sftp._mock_open._mock_write.side_effect = SFTPError(
        2, "Error message"
    )

    with patch(
        "homeassistant.components.backup.manager.read_backup",
        return_value=TEST_AGENT_BACKUP,
    ):
        resp = await http_client.post(
            f"/api/backup/upload?agent_id={DOMAIN}.{TEST_AGENT_ID}",
            data={"file": StringIO("test")},
        )

    # The upload endpoint still reports created; the failure is only logged.
    assert resp.status == 201
    assert (
        f"Unexpected error for {DOMAIN}.{TEST_AGENT_ID}: Error message"
        in caplog.messages
    )
|
||||
|
||||
|
||||
async def test_agents_delete(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Verify deleting a backup removes both remote files without agent errors."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)

    ws_client = await hass_ws_client(hass)
    delete_request = {
        "type": "backup/delete",
        "backup_id": TEST_AGENT_BACKUP.backup_id,
    }
    await ws_client.send_json_auto_id(delete_request)
    reply = await ws_client.receive_json()

    assert reply["success"]
    assert reply["result"] == {"agent_errors": {}}

    # unlink is invoked twice: once for the metadata file, once for the backup.
    assert mock_ssh_connection._sftp._mock_unlink.call_count == 2
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("exists_side_effect", "expected_result"),
    [
        (
            [True, False],
            {"agent_errors": {}},
        ),  # First `True` is to confirm the metadata file exists
        (
            SFTPError(0, "manual"),
            {
                "agent_errors": {
                    f"{DOMAIN}.{TEST_AGENT_ID}": f"Failed to delete backup id: {TEST_AGENT_BACKUP.backup_id}: manual"
                }
            },
        ),
    ],
    ids=["file_not_found_exc", "sftp_error_exc"],
)
async def test_agents_delete_fail(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
    # Fixed annotation: parametrize supplies either a list of bools (consumed
    # one per `exists` call) or an SFTPError instance, never a bare bool.
    exists_side_effect: list[bool] | Exception,
    expected_result: dict[str, dict[str, str]],
) -> None:
    """Test agent delete backup fails.

    The mocked SFTP `exists` call either reports the backup file as missing
    or raises `SFTPError`; the websocket result must reflect that.
    """
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)
    mock_ssh_connection._sftp._mock_exists.side_effect = exists_side_effect

    client = await hass_ws_client(hass)
    await client.send_json_auto_id(
        {
            "type": "backup/delete",
            "backup_id": TEST_AGENT_BACKUP.backup_id,
        }
    )
    response = await client.receive_json()

    assert response["success"]
    assert response["result"] == expected_result
|
||||
|
||||
|
||||
async def test_agents_delete_not_found(
    hass: HomeAssistant,
    hass_ws_client: WebSocketGenerator,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Deleting an unknown backup id succeeds with no agent errors reported."""
    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)

    ws_client = await hass_ws_client(hass)
    unknown_backup_id = "1234"

    await ws_client.send_json_auto_id(
        {"type": "backup/delete", "backup_id": unknown_backup_id}
    )
    reply = await ws_client.receive_json()

    assert reply["success"]
    assert reply["result"] == {"agent_errors": {}}
|
||||
|
||||
|
||||
async def test_listeners_get_cleaned_up(hass: HomeAssistant) -> None:
    """Removing the last registered listener drops the hass.data key."""
    listener = MagicMock()
    unsubscribe = async_register_backup_agents_listener(hass, listener=listener)

    # Force our listener to be the only (last) registered one.
    hass.data[DATA_BACKUP_AGENT_LISTENERS] = [listener]
    unsubscribe()

    assert DATA_BACKUP_AGENT_LISTENERS not in hass.data
|
||||
192
tests/components/sftp_storage/test_config_flow.py
Normal file
192
tests/components/sftp_storage/test_config_flow.py
Normal file
@@ -0,0 +1,192 @@
|
||||
"""Tests config_flow."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from tempfile import NamedTemporaryFile
|
||||
from unittest.mock import patch
|
||||
|
||||
from asyncssh import KeyImportError, generate_private_key
|
||||
from asyncssh.misc import PermissionDenied
|
||||
from asyncssh.sftp import SFTPNoSuchFile, SFTPPermissionDenied
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.sftp_storage.config_flow import (
|
||||
SFTPStorageInvalidPrivateKey,
|
||||
SFTPStorageMissingPasswordOrPkey,
|
||||
)
|
||||
from homeassistant.components.sftp_storage.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PRIVATE_KEY_FILE,
|
||||
CONF_USERNAME,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_USER
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
|
||||
from .conftest import USER_INPUT, SSHClientConnectionMock
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
type ComponentSetup = Callable[[], Awaitable[None]]
|
||||
|
||||
|
||||
@pytest.fixture
def mock_process_uploaded_file():
    """Provide a temporary on-disk private key and stub out key-file handling.

    Generates an RSA key, writes it to a temp file, and patches both
    `process_uploaded_file` and `shutil.move` so the config flow sees the
    temp file as the uploaded private key.
    """
    with (
        patch(
            "homeassistant.components.sftp_storage.config_flow.process_uploaded_file"
        ) as uploaded_file_mock,
        patch("shutil.move") as move_mock,
        NamedTemporaryFile() as key_file,
    ):
        generated_key = generate_private_key("ssh-rsa")
        key_file.write(generated_key.export_private_key("pkcs8-pem"))
        key_file.flush()
        uploaded_file_mock.return_value.__enter__.return_value = key_file.name
        move_mock.return_value = key_file.name
        yield
|
||||
|
||||
|
||||
@pytest.mark.usefixtures(
    "current_request_with_host", "mock_process_uploaded_file", "mock_ssh_connection"
)
async def test_backup_sftp_full_flow(
    hass: HomeAssistant,
) -> None:
    """Test the full backup_sftp config flow with valid user input."""
    submitted = USER_INPUT.copy()

    # Kick off the flow; it must present the "user" form first.
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert flow["step_id"] == "user"

    flow = await hass.config_entries.flow.async_configure(flow["flow_id"], submitted)

    assert len(hass.config_entries.async_entries(DOMAIN)) == 1

    # A new config entry must have been created with the expected title.
    assert flow["type"] is FlowResultType.CREATE_ENTRY
    assert flow["title"] == f"{submitted[CONF_USERNAME]}@{submitted[CONF_HOST]}"

    # The flow stores its own private key path; copy it before comparing data.
    submitted[CONF_PRIVATE_KEY_FILE] = flow["data"][CONF_PRIVATE_KEY_FILE]
    assert flow["data"] == submitted
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("current_request_with_host")
@pytest.mark.usefixtures("mock_process_uploaded_file")
@pytest.mark.usefixtures("mock_ssh_connection")
async def test_already_configured(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
) -> None:
    """Verify the flow aborts when the same entry is already configured."""
    config_entry.add_to_hass(hass)

    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert flow["step_id"] == "user"

    flow = await hass.config_entries.flow.async_configure(flow["flow_id"], USER_INPUT)

    assert flow["type"] is FlowResultType.ABORT
    assert flow["reason"] == "already_configured"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("exception_type", "error_base"),
    [
        (OSError, "os_error"),
        (SFTPStorageInvalidPrivateKey, "invalid_key"),
        (PermissionDenied, "permission_denied"),
        (SFTPStorageMissingPasswordOrPkey, "key_or_password_needed"),
        (SFTPNoSuchFile, "sftp_no_such_file"),
        (SFTPPermissionDenied, "sftp_permission_denied"),
        (Exception, "unknown"),
    ],
)
@pytest.mark.usefixtures("current_request_with_host")
@pytest.mark.usefixtures("mock_process_uploaded_file")
async def test_config_flow_exceptions(
    # Fixed annotation: parametrize supplies exception *classes*, not instances.
    exception_type: type[Exception],
    error_base: str,
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    mock_ssh_connection: SSHClientConnectionMock,
) -> None:
    """Test that connection errors map to form errors and the flow can recover."""
    mock_ssh_connection._sftp._mock_chdir.side_effect = exception_type("Error message.")

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], USER_INPUT
    )

    # The failing `chdir` must be translated into the expected form error.
    assert result["type"] is FlowResultType.FORM
    assert result["errors"] and result["errors"]["base"] == error_base

    # Recover from the error
    mock_ssh_connection._sftp._mock_chdir.side_effect = None

    config_entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], USER_INPUT
    )

    # With a matching entry already present, the retried flow aborts.
    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("current_request_with_host")
@pytest.mark.usefixtures("mock_process_uploaded_file")
async def test_config_entry_error(hass: HomeAssistant) -> None:
    """Test config flow with raised `KeyImportError`."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert flow["step_id"] == "user"

    # A bad key makes SSHClientConnectionOptions raise KeyImportError.
    with patch(
        "homeassistant.components.sftp_storage.config_flow.SSHClientConnectionOptions",
        side_effect=KeyImportError("Invalid key"),
    ):
        flow = await hass.config_entries.flow.async_configure(
            flow["flow_id"], USER_INPUT
        )
    assert "errors" in flow and flow["errors"]["base"] == "invalid_key"

    # Submitting neither a password nor a private key must also be rejected.
    no_auth_input = USER_INPUT.copy()
    no_auth_input[CONF_PASSWORD] = ""
    del no_auth_input[CONF_PRIVATE_KEY_FILE]

    flow = await hass.config_entries.flow.async_configure(
        flow["flow_id"], no_auth_input
    )
    assert "errors" in flow and flow["errors"]["base"] == "key_or_password_needed"
|
||||
193
tests/components/sftp_storage/test_init.py
Normal file
193
tests/components/sftp_storage/test_init.py
Normal file
@@ -0,0 +1,193 @@
|
||||
"""Tests for SFTP Storage."""
|
||||
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from asyncssh.sftp import SFTPPermissionDenied
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.sftp_storage import SFTPConfigEntryData
|
||||
from homeassistant.components.sftp_storage.const import DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.util.ulid import ulid
|
||||
|
||||
from .asyncssh_mock import SSHClientConnectionMock
|
||||
from .conftest import (
|
||||
CONF_BACKUP_LOCATION,
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_PRIVATE_KEY_FILE,
|
||||
CONF_USERNAME,
|
||||
USER_INPUT,
|
||||
ComponentSetup,
|
||||
private_key_file,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_ssh_connection")
async def test_setup_and_unload(
    hass: HomeAssistant,
    setup_integration: ComponentSetup,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test successful setup and unload."""
    # Patch `Path.exists` so the
    # `homeassistant.components.sftp_storage.client.get_client_keys()` code
    # path is exercised during setup as well.
    with (
        patch(
            "homeassistant.components.sftp_storage.client.SSHClientConnectionOptions"
        ),
        patch("pathlib.Path.exists", return_value=True),
    ):
        await setup_integration()

    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1
    entry = entries[0]
    assert entry.state is ConfigEntryState.LOADED

    await hass.config_entries.async_unload(entry.entry_id)

    assert entry.state is ConfigEntryState.NOT_LOADED
    assert (
        f"Unloading {DOMAIN} integration for host {entry.data[CONF_USERNAME]}@{entry.data[CONF_HOST]}"
        in caplog.messages
    )
|
||||
|
||||
|
||||
async def test_setup_error(
    mock_ssh_connection: SSHClientConnectionMock,
    hass: HomeAssistant,
    setup_integration: ComponentSetup,
) -> None:
    """Test setup error."""
    # A failing `chdir` into the backup location must put the entry in error.
    chdir_mock = mock_ssh_connection._sftp._mock_chdir
    chdir_mock.side_effect = SFTPPermissionDenied("Error message")
    await setup_integration()

    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1
    assert entries[0].state is ConfigEntryState.SETUP_ERROR
|
||||
|
||||
|
||||
async def test_setup_unexpected_error(
    hass: HomeAssistant,
    setup_integration: ComponentSetup,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test setup failing on an unexpected SSH connection error."""
    with patch(
        "homeassistant.components.sftp_storage.client.connect",
        side_effect=OSError("Error message"),
    ):
        await setup_integration()

    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1
    assert entries[0].state is ConfigEntryState.SETUP_ERROR
    assert (
        "Failure while attempting to establish SSH connection. Please check SSH credentials and if changed, re-install the integration"
        in caplog.text
    )
|
||||
|
||||
|
||||
async def test_async_remove_entry(
    hass: HomeAssistant,
    setup_integration: ComponentSetup,
) -> None:
    """Test async_remove_entry cleans up private keys and the storage dir."""
    # First (default) config entry.
    await setup_integration()

    # Second config entry with its own generated private key.
    agent_id = ulid()
    with private_key_file(hass) as generated_key:
        second_entry = MockConfigEntry(
            domain=DOMAIN,
            entry_id=agent_id,
            unique_id=agent_id,
            title="another@192.168.0.100",
            data={
                CONF_HOST: "127.0.0.1",
                CONF_PORT: 22,
                CONF_USERNAME: "another",
                CONF_PASSWORD: "password",
                CONF_PRIVATE_KEY_FILE: str(generated_key),
                CONF_BACKUP_LOCATION: "backup_location",
            },
        )
        second_entry.add_to_hass(hass)
        await setup_integration(second_entry)
    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 2

    first_entry = entries[0]
    first_key = Path(first_entry.data[CONF_PRIVATE_KEY_FILE])
    second_key = Path(second_entry.data[CONF_PRIVATE_KEY_FILE])

    # Both keys live in the shared integration storage directory.
    assert first_key.parent == second_key.parent
    assert first_key.exists()
    assert second_key.exists()

    # Removing the first entry deletes only its key; the shared storage
    # directory and the second key must survive.
    assert await hass.config_entries.async_remove(first_entry.entry_id)
    assert not first_key.exists()
    assert second_key.exists()
    assert second_key.parent.exists()
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1

    # Removing the last entry deletes its key and the now-empty directory.
    assert await hass.config_entries.async_remove(second_entry.entry_id)
    assert not second_key.exists()
    assert not second_key.parent.exists()

    assert hass.config_entries.async_entries(DOMAIN) == []
    assert first_entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("patch_target", "expected_logs"),
    [
        (
            "os.unlink",
            [
                "Failed to remove private key",
                f"Storage directory for {DOMAIN} integration is not empty",
            ],
        ),
        ("os.rmdir", ["Error occurred while removing directory"]),
    ],
)
async def test_async_remove_entry_errors(
    patch_target: str,
    expected_logs: list[str],
    hass: HomeAssistant,
    setup_integration: ComponentSetup,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that filesystem errors during entry removal are logged."""
    # Set up the default config entry.
    await setup_integration()
    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1

    # Simulate EACCES from the patched os call while cleanup runs.
    with patch(patch_target, side_effect=OSError(13, "Permission denied")):
        await hass.config_entries.async_remove(entries[0].entry_id)
    for expected in expected_logs:
        assert expected in caplog.text
|
||||
|
||||
|
||||
async def test_config_entry_data_password_hidden() -> None:
    """Ensure the password never appears in `SFTPConfigEntryData`'s repr."""
    entry_data = SFTPConfigEntryData(**USER_INPUT.copy())
    assert "password=" not in str(entry_data)
|
||||
Reference in New Issue
Block a user