mirror of
https://github.com/home-assistant/core.git
synced 2026-05-08 17:49:37 +01:00
Sonarr service calls instead of sensor attributes (#161199)
Co-authored-by: Joostlek <joostlek@outlook.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit is contained in:
@@ -18,7 +18,9 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
CONF_BASE_PATH,
|
||||
@@ -39,9 +41,18 @@ from .coordinator import (
|
||||
StatusDataUpdateCoordinator,
|
||||
WantedDataUpdateCoordinator,
|
||||
)
|
||||
from .services import async_setup_services
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Sonarr integration."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Sonarr from a config entry."""
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
"""Constants for Sonarr."""
|
||||
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
DOMAIN = "sonarr"
|
||||
DOMAIN: Final = "sonarr"
|
||||
|
||||
# Config Keys
|
||||
CONF_BASE_PATH = "base_path"
|
||||
@@ -17,5 +18,20 @@ DEFAULT_NAME = "Sonarr"
|
||||
DEFAULT_UPCOMING_DAYS = 1
|
||||
DEFAULT_VERIFY_SSL = False
|
||||
DEFAULT_WANTED_MAX_ITEMS = 50
|
||||
DEFAULT_MAX_RECORDS: Final = 20
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
|
||||
# Service names
|
||||
SERVICE_GET_SERIES: Final = "get_series"
|
||||
SERVICE_GET_EPISODES: Final = "get_episodes"
|
||||
SERVICE_GET_QUEUE: Final = "get_queue"
|
||||
SERVICE_GET_DISKSPACE: Final = "get_diskspace"
|
||||
SERVICE_GET_UPCOMING: Final = "get_upcoming"
|
||||
SERVICE_GET_WANTED: Final = "get_wanted"
|
||||
|
||||
# Service attributes
|
||||
ATTR_SHOWS: Final = "shows"
|
||||
ATTR_DISKS: Final = "disks"
|
||||
ATTR_EPISODES: Final = "episodes"
|
||||
ATTR_ENTRY_ID: Final = "entry_id"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from typing import TypeVar, cast
|
||||
|
||||
@@ -40,15 +41,31 @@ SonarrDataT = TypeVar(
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SonarrData:
|
||||
"""Sonarr data type."""
|
||||
|
||||
upcoming: CalendarDataUpdateCoordinator
|
||||
commands: CommandsDataUpdateCoordinator
|
||||
diskspace: DiskSpaceDataUpdateCoordinator
|
||||
queue: QueueDataUpdateCoordinator
|
||||
series: SeriesDataUpdateCoordinator
|
||||
status: StatusDataUpdateCoordinator
|
||||
wanted: WantedDataUpdateCoordinator
|
||||
|
||||
|
||||
type SonarrConfigEntry = ConfigEntry[SonarrData]
|
||||
|
||||
|
||||
class SonarrDataUpdateCoordinator(DataUpdateCoordinator[SonarrDataT]):
|
||||
"""Data update coordinator for the Sonarr integration."""
|
||||
|
||||
config_entry: ConfigEntry
|
||||
config_entry: SonarrConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: SonarrConfigEntry,
|
||||
host_configuration: PyArrHostConfiguration,
|
||||
api_client: SonarrClient,
|
||||
) -> None:
|
||||
|
||||
@@ -0,0 +1,416 @@
|
||||
"""Helper functions for Sonarr."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiopyarr import (
|
||||
Diskspace,
|
||||
SonarrCalendar,
|
||||
SonarrEpisode,
|
||||
SonarrQueue,
|
||||
SonarrSeries,
|
||||
SonarrWantedMissing,
|
||||
)
|
||||
|
||||
|
||||
def format_queue_item(item: Any, base_url: str | None = None) -> dict[str, Any]:
|
||||
"""Format a single queue item."""
|
||||
# Calculate progress
|
||||
remaining = 1 if item.size == 0 else item.sizeleft / item.size
|
||||
remaining_pct = 100 * (1 - remaining)
|
||||
|
||||
result: dict[str, Any] = {
|
||||
"id": item.id,
|
||||
"series_id": getattr(item, "seriesId", None),
|
||||
"episode_id": getattr(item, "episodeId", None),
|
||||
"title": item.series.title,
|
||||
"download_title": item.title,
|
||||
"season_number": getattr(item, "seasonNumber", None),
|
||||
"progress": f"{remaining_pct:.2f}%",
|
||||
"size": item.size,
|
||||
"size_left": item.sizeleft,
|
||||
"status": item.status,
|
||||
"tracked_download_status": getattr(item, "trackedDownloadStatus", None),
|
||||
"tracked_download_state": getattr(item, "trackedDownloadState", None),
|
||||
"download_client": getattr(item, "downloadClient", None),
|
||||
"download_id": getattr(item, "downloadId", None),
|
||||
"indexer": getattr(item, "indexer", None),
|
||||
"protocol": str(getattr(item, "protocol", None)),
|
||||
"episode_has_file": getattr(item, "episodeHasFile", None),
|
||||
"estimated_completion_time": str(
|
||||
getattr(item, "estimatedCompletionTime", None)
|
||||
),
|
||||
"time_left": str(getattr(item, "timeleft", None)),
|
||||
}
|
||||
|
||||
# Add episode information from the episode object if available
|
||||
if episode := getattr(item, "episode", None):
|
||||
result["episode_number"] = getattr(episode, "episodeNumber", None)
|
||||
result["episode_title"] = getattr(episode, "title", None)
|
||||
# Add formatted identifier like the sensor uses (if we have both season and episode)
|
||||
if result["season_number"] is not None and result["episode_number"] is not None:
|
||||
result["episode_identifier"] = (
|
||||
f"S{result['season_number']:02d}E{result['episode_number']:02d}"
|
||||
)
|
||||
|
||||
# Add quality information if available
|
||||
if quality := getattr(item, "quality", None):
|
||||
result["quality"] = quality.quality.name
|
||||
|
||||
# Add language information if available
|
||||
if languages := getattr(item, "languages", None):
|
||||
result["languages"] = [lang["name"] for lang in languages]
|
||||
|
||||
# Add custom format score if available
|
||||
if custom_format_score := getattr(item, "customFormatScore", None):
|
||||
result["custom_format_score"] = custom_format_score
|
||||
|
||||
# Add series images if available
|
||||
if images := getattr(item.series, "images", None):
|
||||
result["images"] = {}
|
||||
for image in images:
|
||||
cover_type = image.coverType
|
||||
# Prefer remoteUrl (public TVDB URL) over local path
|
||||
if remote_url := getattr(image, "remoteUrl", None):
|
||||
result["images"][cover_type] = remote_url
|
||||
elif base_url and (url := getattr(image, "url", None)):
|
||||
result["images"][cover_type] = f"{base_url.rstrip('/')}{url}"
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def format_queue(
|
||||
queue: SonarrQueue, base_url: str | None = None
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
"""Format queue for service response."""
|
||||
# Group queue items by download ID to handle season packs
|
||||
downloads: dict[str, list[Any]] = {}
|
||||
for item in queue.records:
|
||||
download_id = getattr(item, "downloadId", None)
|
||||
if download_id:
|
||||
if download_id not in downloads:
|
||||
downloads[download_id] = []
|
||||
downloads[download_id].append(item)
|
||||
|
||||
shows = {}
|
||||
for items in downloads.values():
|
||||
if len(items) == 1:
|
||||
# Single episode download
|
||||
item = items[0]
|
||||
shows[item.title] = format_queue_item(item, base_url)
|
||||
else:
|
||||
# Multiple episodes (season pack) - use first item for main data
|
||||
item = items[0]
|
||||
formatted = format_queue_item(item, base_url)
|
||||
|
||||
# Get all episode numbers for this download
|
||||
episode_numbers = sorted(
|
||||
getattr(i.episode, "episodeNumber", 0)
|
||||
for i in items
|
||||
if hasattr(i, "episode")
|
||||
)
|
||||
|
||||
# Format as season pack
|
||||
if episode_numbers:
|
||||
min_ep = min(episode_numbers)
|
||||
max_ep = max(episode_numbers)
|
||||
formatted["is_season_pack"] = True
|
||||
formatted["episode_count"] = len(episode_numbers)
|
||||
formatted["episode_range"] = f"E{min_ep:02d}-E{max_ep:02d}"
|
||||
# Update identifier to show it's a season pack
|
||||
if formatted.get("season_number") is not None:
|
||||
formatted["episode_identifier"] = (
|
||||
f"S{formatted['season_number']:02d} "
|
||||
f"({len(episode_numbers)} episodes)"
|
||||
)
|
||||
|
||||
shows[item.title] = formatted
|
||||
|
||||
return shows
|
||||
|
||||
|
||||
def format_episode_item(
|
||||
series: SonarrSeries, episode_data: dict[str, Any], base_url: str | None = None
|
||||
) -> dict[str, Any]:
|
||||
"""Format a single episode item."""
|
||||
result: dict[str, Any] = {
|
||||
"id": episode_data.get("id"),
|
||||
"episode_number": episode_data.get("episodeNumber"),
|
||||
"season_number": episode_data.get("seasonNumber"),
|
||||
"title": episode_data.get("title"),
|
||||
"air_date": str(episode_data.get("airDate", "")),
|
||||
"overview": episode_data.get("overview"),
|
||||
"has_file": episode_data.get("hasFile", False),
|
||||
"monitored": episode_data.get("monitored", False),
|
||||
}
|
||||
|
||||
# Add episode images if available
|
||||
if images := episode_data.get("images"):
|
||||
result["images"] = {}
|
||||
for image in images:
|
||||
cover_type = image.coverType
|
||||
# Prefer remoteUrl (public TVDB URL) over local path
|
||||
if remote_url := getattr(image, "remoteUrl", None):
|
||||
result["images"][cover_type] = remote_url
|
||||
elif base_url and (url := getattr(image, "url", None)):
|
||||
result["images"][cover_type] = f"{base_url.rstrip('/')}{url}"
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def format_series(
|
||||
series_list: list[SonarrSeries], base_url: str | None = None
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
"""Format series list for service response."""
|
||||
formatted_shows = {}
|
||||
|
||||
for series in series_list:
|
||||
series_title = series.title
|
||||
formatted_shows[series_title] = {
|
||||
"id": series.id,
|
||||
"year": series.year,
|
||||
"tvdb_id": getattr(series, "tvdbId", None),
|
||||
"imdb_id": getattr(series, "imdbId", None),
|
||||
"status": series.status,
|
||||
"monitored": series.monitored,
|
||||
}
|
||||
|
||||
# Add episode statistics if available (like the sensor shows)
|
||||
if statistics := getattr(series, "statistics", None):
|
||||
episode_file_count = getattr(statistics, "episodeFileCount", None)
|
||||
episode_count = getattr(statistics, "episodeCount", None)
|
||||
formatted_shows[series_title]["episode_file_count"] = episode_file_count
|
||||
formatted_shows[series_title]["episode_count"] = episode_count
|
||||
# Only format episodes_info if we have valid data
|
||||
if episode_file_count is not None and episode_count is not None:
|
||||
formatted_shows[series_title]["episodes_info"] = (
|
||||
f"{episode_file_count}/{episode_count} Episodes"
|
||||
)
|
||||
else:
|
||||
formatted_shows[series_title]["episodes_info"] = None
|
||||
|
||||
# Add series images if available
|
||||
if images := getattr(series, "images", None):
|
||||
images_dict: dict[str, str] = {}
|
||||
for image in images:
|
||||
cover_type = image.coverType
|
||||
# Prefer remoteUrl (public TVDB URL) over local path
|
||||
if remote_url := getattr(image, "remoteUrl", None):
|
||||
images_dict[cover_type] = remote_url
|
||||
elif base_url and (url := getattr(image, "url", None)):
|
||||
images_dict[cover_type] = f"{base_url.rstrip('/')}{url}"
|
||||
formatted_shows[series_title]["images"] = images_dict
|
||||
|
||||
return formatted_shows
|
||||
|
||||
|
||||
# Space unit conversion factors (divisors from bytes)
|
||||
SPACE_UNITS: dict[str, int] = {
|
||||
"bytes": 1,
|
||||
"kb": 1000,
|
||||
"kib": 1024,
|
||||
"mb": 1000**2,
|
||||
"mib": 1024**2,
|
||||
"gb": 1000**3,
|
||||
"gib": 1024**3,
|
||||
"tb": 1000**4,
|
||||
"tib": 1024**4,
|
||||
"pb": 1000**5,
|
||||
"pib": 1024**5,
|
||||
}
|
||||
|
||||
|
||||
def format_diskspace(
|
||||
disks: list[Diskspace], space_unit: str = "bytes"
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
"""Format diskspace for service response.
|
||||
|
||||
Args:
|
||||
disks: List of disk space objects from Sonarr.
|
||||
space_unit: Unit for space values (bytes, kb, kib, mb, mib, gb, gib, tb, tib, pb, pib).
|
||||
|
||||
Returns:
|
||||
Dictionary of disk information keyed by path.
|
||||
"""
|
||||
result = {}
|
||||
divisor = SPACE_UNITS.get(space_unit, 1)
|
||||
|
||||
for disk in disks:
|
||||
path = disk.path
|
||||
free_space = disk.freeSpace / divisor
|
||||
total_space = disk.totalSpace / divisor
|
||||
|
||||
result[path] = {
|
||||
"path": path,
|
||||
"label": getattr(disk, "label", None) or "",
|
||||
"free_space": free_space,
|
||||
"total_space": total_space,
|
||||
"unit": space_unit,
|
||||
}
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _format_series_images(series: Any, base_url: str | None = None) -> dict[str, str]:
|
||||
"""Format series images."""
|
||||
images_dict: dict[str, str] = {}
|
||||
if images := getattr(series, "images", None):
|
||||
for image in images:
|
||||
cover_type = image.coverType
|
||||
# Prefer remoteUrl (public TVDB URL) over local path
|
||||
if remote_url := getattr(image, "remoteUrl", None):
|
||||
images_dict[cover_type] = remote_url
|
||||
elif base_url and (url := getattr(image, "url", None)):
|
||||
images_dict[cover_type] = f"{base_url.rstrip('/')}{url}"
|
||||
return images_dict
|
||||
|
||||
|
||||
def format_upcoming_item(
|
||||
episode: SonarrCalendar, base_url: str | None = None
|
||||
) -> dict[str, Any]:
|
||||
"""Format a single upcoming episode item."""
|
||||
result: dict[str, Any] = {
|
||||
"id": episode.id,
|
||||
"series_id": episode.seriesId,
|
||||
"season_number": episode.seasonNumber,
|
||||
"episode_number": episode.episodeNumber,
|
||||
"episode_identifier": f"S{episode.seasonNumber:02d}E{episode.episodeNumber:02d}",
|
||||
"title": episode.title,
|
||||
"air_date": str(getattr(episode, "airDate", None)),
|
||||
"air_date_utc": str(getattr(episode, "airDateUtc", None)),
|
||||
"overview": getattr(episode, "overview", None),
|
||||
"has_file": getattr(episode, "hasFile", False),
|
||||
"monitored": getattr(episode, "monitored", True),
|
||||
"runtime": getattr(episode, "runtime", None),
|
||||
"finale_type": getattr(episode, "finaleType", None),
|
||||
}
|
||||
|
||||
# Add series information
|
||||
if series := getattr(episode, "series", None):
|
||||
result["series_title"] = series.title
|
||||
result["series_year"] = getattr(series, "year", None)
|
||||
result["series_tvdb_id"] = getattr(series, "tvdbId", None)
|
||||
result["series_imdb_id"] = getattr(series, "imdbId", None)
|
||||
result["series_status"] = getattr(series, "status", None)
|
||||
result["network"] = getattr(series, "network", None)
|
||||
result["images"] = _format_series_images(series, base_url)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def format_upcoming(
|
||||
calendar: list[SonarrCalendar], base_url: str | None = None
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
"""Format upcoming calendar for service response."""
|
||||
episodes = {}
|
||||
|
||||
for episode in calendar:
|
||||
# Create a unique key combining series title and episode identifier
|
||||
series_title = episode.series.title if hasattr(episode, "series") else "Unknown"
|
||||
identifier = f"S{episode.seasonNumber:02d}E{episode.episodeNumber:02d}"
|
||||
key = f"{series_title} {identifier}"
|
||||
episodes[key] = format_upcoming_item(episode, base_url)
|
||||
|
||||
return episodes
|
||||
|
||||
|
||||
def format_wanted_item(item: Any, base_url: str | None = None) -> dict[str, Any]:
|
||||
"""Format a single wanted episode item."""
|
||||
result: dict[str, Any] = {
|
||||
"id": item.id,
|
||||
"series_id": item.seriesId,
|
||||
"season_number": item.seasonNumber,
|
||||
"episode_number": item.episodeNumber,
|
||||
"episode_identifier": f"S{item.seasonNumber:02d}E{item.episodeNumber:02d}",
|
||||
"title": item.title,
|
||||
"air_date": str(getattr(item, "airDate", None)),
|
||||
"air_date_utc": str(getattr(item, "airDateUtc", None)),
|
||||
"overview": getattr(item, "overview", None),
|
||||
"has_file": getattr(item, "hasFile", False),
|
||||
"monitored": getattr(item, "monitored", True),
|
||||
"runtime": getattr(item, "runtime", None),
|
||||
"tvdb_id": getattr(item, "tvdbId", None),
|
||||
}
|
||||
|
||||
# Add series information
|
||||
if series := getattr(item, "series", None):
|
||||
result["series_title"] = series.title
|
||||
result["series_year"] = getattr(series, "year", None)
|
||||
result["series_tvdb_id"] = getattr(series, "tvdbId", None)
|
||||
result["series_imdb_id"] = getattr(series, "imdbId", None)
|
||||
result["series_status"] = getattr(series, "status", None)
|
||||
result["network"] = getattr(series, "network", None)
|
||||
result["images"] = _format_series_images(series, base_url)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def format_wanted(
|
||||
wanted: SonarrWantedMissing, base_url: str | None = None
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
"""Format wanted missing episodes for service response."""
|
||||
episodes = {}
|
||||
|
||||
for item in wanted.records:
|
||||
# Create a unique key combining series title and episode identifier
|
||||
series_title = (
|
||||
item.series.title if hasattr(item, "series") and item.series else "Unknown"
|
||||
)
|
||||
identifier = f"S{item.seasonNumber:02d}E{item.episodeNumber:02d}"
|
||||
key = f"{series_title} {identifier}"
|
||||
episodes[key] = format_wanted_item(item, base_url)
|
||||
|
||||
return episodes
|
||||
|
||||
|
||||
def format_episode(episode: SonarrEpisode) -> dict[str, Any]:
|
||||
"""Format a single episode from a series."""
|
||||
result: dict[str, Any] = {
|
||||
"id": episode.id,
|
||||
"series_id": episode.seriesId,
|
||||
"tvdb_id": getattr(episode, "tvdbId", None),
|
||||
"season_number": episode.seasonNumber,
|
||||
"episode_number": episode.episodeNumber,
|
||||
"episode_identifier": f"S{episode.seasonNumber:02d}E{episode.episodeNumber:02d}",
|
||||
"title": episode.title,
|
||||
"air_date": str(getattr(episode, "airDate", None)),
|
||||
"air_date_utc": str(getattr(episode, "airDateUtc", None)),
|
||||
"has_file": getattr(episode, "hasFile", False),
|
||||
"monitored": getattr(episode, "monitored", False),
|
||||
"runtime": getattr(episode, "runtime", None),
|
||||
"episode_file_id": getattr(episode, "episodeFileId", None),
|
||||
}
|
||||
|
||||
# Add overview if available (not always present)
|
||||
if overview := getattr(episode, "overview", None):
|
||||
result["overview"] = overview
|
||||
|
||||
# Add finale type if applicable
|
||||
if finale_type := getattr(episode, "finaleType", None):
|
||||
result["finale_type"] = finale_type
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def format_episodes(
|
||||
episodes: list[SonarrEpisode], season_number: int | None = None
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
"""Format episodes list for service response.
|
||||
|
||||
Args:
|
||||
episodes: List of episodes to format.
|
||||
season_number: Optional season number to filter by.
|
||||
|
||||
Returns:
|
||||
Dictionary of episodes keyed by episode identifier (e.g., "S01E01").
|
||||
"""
|
||||
result = {}
|
||||
|
||||
for episode in episodes:
|
||||
# Filter by season if specified
|
||||
if season_number is not None and episode.seasonNumber != season_number:
|
||||
continue
|
||||
|
||||
identifier = f"S{episode.seasonNumber:02d}E{episode.episodeNumber:02d}"
|
||||
result[identifier] = format_episode(episode)
|
||||
|
||||
return result
|
||||
@@ -20,5 +20,25 @@
|
||||
"default": "mdi:television"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"get_diskspace": {
|
||||
"service": "mdi:harddisk"
|
||||
},
|
||||
"get_episodes": {
|
||||
"service": "mdi:filmstrip"
|
||||
},
|
||||
"get_queue": {
|
||||
"service": "mdi:download"
|
||||
},
|
||||
"get_series": {
|
||||
"service": "mdi:television"
|
||||
},
|
||||
"get_upcoming": {
|
||||
"service": "mdi:calendar-clock"
|
||||
},
|
||||
"get_wanted": {
|
||||
"service": "mdi:magnify"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,7 +40,7 @@ class SonarrSensorEntityDescriptionMixIn(Generic[SonarrDataT]):
|
||||
value_fn: Callable[[SonarrDataT], StateType]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class SonarrSensorEntityDescription(
|
||||
SensorEntityDescription, SonarrSensorEntityDescriptionMixIn[SonarrDataT]
|
||||
):
|
||||
@@ -162,6 +162,7 @@ class SonarrSensor(SonarrEntity[SonarrDataT], SensorEntity):
|
||||
coordinator: SonarrDataUpdateCoordinator[SonarrDataT]
|
||||
entity_description: SonarrSensorEntityDescription[SonarrDataT]
|
||||
|
||||
# Note: Sensor extra_state_attributes are deprecated and will be removed in 2026.9
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, str]:
|
||||
"""Return the state attributes of the entity."""
|
||||
|
||||
@@ -0,0 +1,284 @@
|
||||
"""Define services for the Sonarr integration."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from datetime import timedelta
|
||||
from typing import Any, cast
|
||||
|
||||
from aiopyarr import exceptions
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import (
|
||||
ATTR_DISKS,
|
||||
ATTR_ENTRY_ID,
|
||||
ATTR_EPISODES,
|
||||
ATTR_SHOWS,
|
||||
DEFAULT_UPCOMING_DAYS,
|
||||
DOMAIN,
|
||||
SERVICE_GET_DISKSPACE,
|
||||
SERVICE_GET_EPISODES,
|
||||
SERVICE_GET_QUEUE,
|
||||
SERVICE_GET_SERIES,
|
||||
SERVICE_GET_UPCOMING,
|
||||
SERVICE_GET_WANTED,
|
||||
)
|
||||
from .coordinator import SonarrConfigEntry
|
||||
from .helpers import (
|
||||
format_diskspace,
|
||||
format_episodes,
|
||||
format_queue,
|
||||
format_series,
|
||||
format_upcoming,
|
||||
format_wanted,
|
||||
)
|
||||
|
||||
# Service parameter constants
|
||||
CONF_DAYS = "days"
|
||||
CONF_MAX_ITEMS = "max_items"
|
||||
CONF_SERIES_ID = "series_id"
|
||||
CONF_SEASON_NUMBER = "season_number"
|
||||
CONF_SPACE_UNIT = "space_unit"
|
||||
|
||||
# Valid space units
|
||||
SPACE_UNITS = ["bytes", "kb", "kib", "mb", "mib", "gb", "gib", "tb", "tib", "pb", "pib"]
|
||||
DEFAULT_SPACE_UNIT = "bytes"
|
||||
|
||||
# Default values - 0 means no limit
|
||||
DEFAULT_MAX_ITEMS = 0
|
||||
|
||||
SERVICE_BASE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ENTRY_ID): selector.ConfigEntrySelector(
|
||||
{"integration": DOMAIN}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_GET_SERIES_SCHEMA = SERVICE_BASE_SCHEMA
|
||||
|
||||
SERVICE_GET_EPISODES_SCHEMA = SERVICE_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_SERIES_ID): vol.All(vol.Coerce(int), vol.Range(min=1)),
|
||||
vol.Optional(CONF_SEASON_NUMBER): vol.All(vol.Coerce(int), vol.Range(min=0)),
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_GET_QUEUE_SCHEMA = SERVICE_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_MAX_ITEMS, default=DEFAULT_MAX_ITEMS): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=0, max=500)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_GET_DISKSPACE_SCHEMA = SERVICE_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_SPACE_UNIT, default=DEFAULT_SPACE_UNIT): vol.In(SPACE_UNITS),
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_GET_UPCOMING_SCHEMA = SERVICE_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_DAYS, default=DEFAULT_UPCOMING_DAYS): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=1, max=30)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_GET_WANTED_SCHEMA = SERVICE_BASE_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_MAX_ITEMS, default=DEFAULT_MAX_ITEMS): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=0, max=500)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _get_config_entry_from_service_data(call: ServiceCall) -> SonarrConfigEntry:
|
||||
"""Return config entry for entry id."""
|
||||
config_entry_id: str = call.data[ATTR_ENTRY_ID]
|
||||
if not (entry := call.hass.config_entries.async_get_entry(config_entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": config_entry_id},
|
||||
)
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": entry.title},
|
||||
)
|
||||
return cast(SonarrConfigEntry, entry)
|
||||
|
||||
|
||||
async def _handle_api_errors[_T](func: Callable[[], Awaitable[_T]]) -> _T:
|
||||
"""Handle API errors and raise HomeAssistantError with user-friendly messages."""
|
||||
try:
|
||||
return await func()
|
||||
except exceptions.ArrAuthenticationException as ex:
|
||||
raise HomeAssistantError("Authentication failed for Sonarr") from ex
|
||||
except exceptions.ArrConnectionException as ex:
|
||||
raise HomeAssistantError("Failed to connect to Sonarr") from ex
|
||||
except exceptions.ArrException as ex:
|
||||
raise HomeAssistantError(f"Sonarr API error: {ex}") from ex
|
||||
|
||||
|
||||
async def _async_get_series(service: ServiceCall) -> dict[str, Any]:
|
||||
"""Get all Sonarr series."""
|
||||
entry = _get_config_entry_from_service_data(service)
|
||||
|
||||
api_client = service.hass.data[DOMAIN][entry.entry_id]["status"].api_client
|
||||
series_list = await _handle_api_errors(api_client.async_get_series)
|
||||
|
||||
base_url = entry.data[CONF_URL]
|
||||
shows = format_series(cast(list, series_list), base_url)
|
||||
|
||||
return {ATTR_SHOWS: shows}
|
||||
|
||||
|
||||
async def _async_get_episodes(service: ServiceCall) -> dict[str, Any]:
|
||||
"""Get episodes for a specific series."""
|
||||
entry = _get_config_entry_from_service_data(service)
|
||||
series_id: int = service.data[CONF_SERIES_ID]
|
||||
season_number: int | None = service.data.get(CONF_SEASON_NUMBER)
|
||||
|
||||
api_client = service.hass.data[DOMAIN][entry.entry_id]["status"].api_client
|
||||
episodes = await _handle_api_errors(
|
||||
lambda: api_client.async_get_episodes(series_id, series=True)
|
||||
)
|
||||
|
||||
formatted_episodes = format_episodes(cast(list, episodes), season_number)
|
||||
|
||||
return {ATTR_EPISODES: formatted_episodes}
|
||||
|
||||
|
||||
async def _async_get_queue(service: ServiceCall) -> dict[str, Any]:
|
||||
"""Get Sonarr queue."""
|
||||
entry = _get_config_entry_from_service_data(service)
|
||||
max_items: int = service.data[CONF_MAX_ITEMS]
|
||||
|
||||
api_client = service.hass.data[DOMAIN][entry.entry_id]["status"].api_client
|
||||
# 0 means no limit - use a large page size to get all items
|
||||
page_size = max_items if max_items > 0 else 10000
|
||||
queue = await _handle_api_errors(
|
||||
lambda: api_client.async_get_queue(
|
||||
page_size=page_size, include_series=True, include_episode=True
|
||||
)
|
||||
)
|
||||
|
||||
base_url = entry.data[CONF_URL]
|
||||
shows = format_queue(queue, base_url)
|
||||
|
||||
return {ATTR_SHOWS: shows}
|
||||
|
||||
|
||||
async def _async_get_diskspace(service: ServiceCall) -> dict[str, Any]:
|
||||
"""Get Sonarr diskspace information."""
|
||||
entry = _get_config_entry_from_service_data(service)
|
||||
space_unit: str = service.data[CONF_SPACE_UNIT]
|
||||
|
||||
api_client = service.hass.data[DOMAIN][entry.entry_id]["status"].api_client
|
||||
disks = await _handle_api_errors(api_client.async_get_diskspace)
|
||||
|
||||
return {ATTR_DISKS: format_diskspace(disks, space_unit)}
|
||||
|
||||
|
||||
async def _async_get_upcoming(service: ServiceCall) -> dict[str, Any]:
|
||||
"""Get Sonarr upcoming episodes."""
|
||||
entry = _get_config_entry_from_service_data(service)
|
||||
days: int = service.data[CONF_DAYS]
|
||||
|
||||
api_client = service.hass.data[DOMAIN][entry.entry_id]["status"].api_client
|
||||
|
||||
local = dt_util.start_of_local_day().replace(microsecond=0)
|
||||
start = dt_util.as_utc(local)
|
||||
end = start + timedelta(days=days)
|
||||
|
||||
calendar = await _handle_api_errors(
|
||||
lambda: api_client.async_get_calendar(
|
||||
start_date=start, end_date=end, include_series=True
|
||||
)
|
||||
)
|
||||
|
||||
base_url = entry.data[CONF_URL]
|
||||
episodes = format_upcoming(cast(list, calendar), base_url)
|
||||
|
||||
return {ATTR_EPISODES: episodes}
|
||||
|
||||
|
||||
async def _async_get_wanted(service: ServiceCall) -> dict[str, Any]:
|
||||
"""Get Sonarr wanted/missing episodes."""
|
||||
entry = _get_config_entry_from_service_data(service)
|
||||
max_items: int = service.data[CONF_MAX_ITEMS]
|
||||
|
||||
api_client = service.hass.data[DOMAIN][entry.entry_id]["status"].api_client
|
||||
# 0 means no limit - use a large page size to get all items
|
||||
page_size = max_items if max_items > 0 else 10000
|
||||
wanted = await _handle_api_errors(
|
||||
lambda: api_client.async_get_wanted(page_size=page_size, include_series=True)
|
||||
)
|
||||
|
||||
base_url = entry.data[CONF_URL]
|
||||
episodes = format_wanted(wanted, base_url)
|
||||
|
||||
return {ATTR_EPISODES: episodes}
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register services for the Sonarr integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_SERIES,
|
||||
_async_get_series,
|
||||
schema=SERVICE_GET_SERIES_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_EPISODES,
|
||||
_async_get_episodes,
|
||||
schema=SERVICE_GET_EPISODES_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_QUEUE,
|
||||
_async_get_queue,
|
||||
schema=SERVICE_GET_QUEUE_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_DISKSPACE,
|
||||
_async_get_diskspace,
|
||||
schema=SERVICE_GET_DISKSPACE_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_UPCOMING,
|
||||
_async_get_upcoming,
|
||||
schema=SERVICE_GET_UPCOMING_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_GET_WANTED,
|
||||
_async_get_wanted,
|
||||
schema=SERVICE_GET_WANTED_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
@@ -0,0 +1,100 @@
|
||||
get_series:
|
||||
fields:
|
||||
entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: sonarr
|
||||
|
||||
get_queue:
|
||||
fields:
|
||||
entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: sonarr
|
||||
max_items:
|
||||
required: false
|
||||
default: 0
|
||||
selector:
|
||||
number:
|
||||
min: 0
|
||||
max: 500
|
||||
mode: box
|
||||
|
||||
get_diskspace:
|
||||
fields:
|
||||
entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: sonarr
|
||||
space_unit:
|
||||
required: false
|
||||
default: bytes
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- bytes
|
||||
- kb
|
||||
- kib
|
||||
- mb
|
||||
- mib
|
||||
- gb
|
||||
- gib
|
||||
- tb
|
||||
- tib
|
||||
- pb
|
||||
- pib
|
||||
|
||||
get_upcoming:
|
||||
fields:
|
||||
entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: sonarr
|
||||
days:
|
||||
required: false
|
||||
default: 1
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
max: 30
|
||||
mode: box
|
||||
|
||||
get_wanted:
|
||||
fields:
|
||||
entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: sonarr
|
||||
max_items:
|
||||
required: false
|
||||
default: 0
|
||||
selector:
|
||||
number:
|
||||
min: 0
|
||||
max: 500
|
||||
mode: box
|
||||
|
||||
get_episodes:
|
||||
fields:
|
||||
entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: sonarr
|
||||
series_id:
|
||||
required: true
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
mode: box
|
||||
season_number:
|
||||
required: false
|
||||
selector:
|
||||
number:
|
||||
min: 0
|
||||
mode: box
|
||||
@@ -51,6 +51,14 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"integration_not_found": {
|
||||
"message": "Config entry for integration \"{target}\" not found."
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "Config entry \"{target}\" is not loaded."
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
@@ -60,5 +68,91 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"get_diskspace": {
|
||||
"description": "Gets disk space information for all configured paths.",
|
||||
"fields": {
|
||||
"entry_id": {
|
||||
"description": "ID of the config entry to use.",
|
||||
"name": "Sonarr entry"
|
||||
},
|
||||
"space_unit": {
|
||||
"description": "Unit for space values. Use binary units (kib, mib, gib, tib, pib) for 1024-based values or decimal units (kb, mb, gb, tb, pb) for 1000-based values.",
|
||||
"name": "Space unit"
|
||||
}
|
||||
},
|
||||
"name": "Get disk space"
|
||||
},
|
||||
"get_episodes": {
|
||||
"description": "Gets episodes for a specific series.",
|
||||
"fields": {
|
||||
"entry_id": {
|
||||
"description": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::description%]",
|
||||
"name": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::name%]"
|
||||
},
|
||||
"season_number": {
|
||||
"description": "Optional season number to filter episodes by.",
|
||||
"name": "Season number"
|
||||
},
|
||||
"series_id": {
|
||||
"description": "The ID of the series to get episodes for.",
|
||||
"name": "Series ID"
|
||||
}
|
||||
},
|
||||
"name": "Get episodes"
|
||||
},
|
||||
"get_queue": {
|
||||
"description": "Gets all episodes currently in the download queue with their progress and details.",
|
||||
"fields": {
|
||||
"entry_id": {
|
||||
"description": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::description%]",
|
||||
"name": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::name%]"
|
||||
},
|
||||
"max_items": {
|
||||
"description": "Maximum number of items to return (0 = no limit).",
|
||||
"name": "Max items"
|
||||
}
|
||||
},
|
||||
"name": "Get queue"
|
||||
},
|
||||
"get_series": {
|
||||
"description": "Gets all series in Sonarr with their details and statistics.",
|
||||
"fields": {
|
||||
"entry_id": {
|
||||
"description": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::description%]",
|
||||
"name": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Get series"
|
||||
},
|
||||
"get_upcoming": {
|
||||
"description": "Gets upcoming episodes from the calendar.",
|
||||
"fields": {
|
||||
"days": {
|
||||
"description": "Number of days to look ahead for upcoming episodes.",
|
||||
"name": "Days"
|
||||
},
|
||||
"entry_id": {
|
||||
"description": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::description%]",
|
||||
"name": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Get upcoming"
|
||||
},
|
||||
"get_wanted": {
|
||||
"description": "Gets wanted/missing episodes that are being searched for.",
|
||||
"fields": {
|
||||
"entry_id": {
|
||||
"description": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::description%]",
|
||||
"name": "[%key:component::sonarr::services::get_diskspace::fields::entry_id::name%]"
|
||||
},
|
||||
"max_items": {
|
||||
"description": "[%key:component::sonarr::services::get_queue::fields::max_items::description%]",
|
||||
"name": "[%key:component::sonarr::services::get_queue::fields::max_items::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Get wanted"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ from aiopyarr import (
|
||||
Command,
|
||||
Diskspace,
|
||||
SonarrCalendar,
|
||||
SonarrEpisode,
|
||||
SonarrQueue,
|
||||
SonarrSeries,
|
||||
SonarrWantedMissing,
|
||||
@@ -59,6 +60,19 @@ def sonarr_queue() -> SonarrQueue:
|
||||
return SonarrQueue(results)
|
||||
|
||||
|
||||
def sonarr_queue_season_pack() -> SonarrQueue:
|
||||
"""Generate a response for the queue method with a season pack."""
|
||||
results = json.loads(load_fixture("sonarr/queue_season_pack.json"))
|
||||
return SonarrQueue(results)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_sonarr_season_pack(mock_sonarr: MagicMock) -> MagicMock:
|
||||
"""Return a mocked Sonarr client with season pack queue data."""
|
||||
mock_sonarr.async_get_queue.return_value = sonarr_queue_season_pack()
|
||||
return mock_sonarr
|
||||
|
||||
|
||||
def sonarr_series() -> list[SonarrSeries]:
|
||||
"""Generate a response for the series method."""
|
||||
results = json.loads(load_fixture("sonarr/series.json"))
|
||||
@@ -77,6 +91,12 @@ def sonarr_wanted() -> SonarrWantedMissing:
|
||||
return SonarrWantedMissing(results)
|
||||
|
||||
|
||||
def sonarr_episodes() -> list[SonarrEpisode]:
|
||||
"""Generate a response for the episodes method."""
|
||||
results = json.loads(load_fixture("sonarr/episodes.json"))
|
||||
return [SonarrEpisode(result) for result in results]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_config_entry() -> MockConfigEntry:
|
||||
"""Return the default mocked config entry."""
|
||||
@@ -118,6 +138,7 @@ def mock_sonarr_config_flow() -> Generator[MagicMock]:
|
||||
client.async_get_calendar.return_value = sonarr_calendar()
|
||||
client.async_get_commands.return_value = sonarr_commands()
|
||||
client.async_get_diskspace.return_value = sonarr_diskspace()
|
||||
client.async_get_episodes.return_value = sonarr_episodes()
|
||||
client.async_get_queue.return_value = sonarr_queue()
|
||||
client.async_get_series.return_value = sonarr_series()
|
||||
client.async_get_system_status.return_value = sonarr_system_status()
|
||||
@@ -136,6 +157,7 @@ def mock_sonarr() -> Generator[MagicMock]:
|
||||
client.async_get_calendar.return_value = sonarr_calendar()
|
||||
client.async_get_commands.return_value = sonarr_commands()
|
||||
client.async_get_diskspace.return_value = sonarr_diskspace()
|
||||
client.async_get_episodes.return_value = sonarr_episodes()
|
||||
client.async_get_queue.return_value = sonarr_queue()
|
||||
client.async_get_series.return_value = sonarr_series()
|
||||
client.async_get_system_status.return_value = sonarr_system_status()
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
[
|
||||
{
|
||||
"seriesId": 105,
|
||||
"tvdbId": 123456,
|
||||
"episodeFileId": 0,
|
||||
"seasonNumber": 1,
|
||||
"episodeNumber": 1,
|
||||
"title": "The New Housekeeper",
|
||||
"airDate": "1960-10-03",
|
||||
"airDateUtc": "1960-10-03T00:00:00Z",
|
||||
"overview": "Andy's housekeeper quits, and a new one arrives.",
|
||||
"hasFile": false,
|
||||
"monitored": true,
|
||||
"runtime": 25,
|
||||
"id": 1001
|
||||
},
|
||||
{
|
||||
"seriesId": 105,
|
||||
"tvdbId": 123457,
|
||||
"episodeFileId": 5001,
|
||||
"seasonNumber": 1,
|
||||
"episodeNumber": 2,
|
||||
"title": "The Manhunt",
|
||||
"airDate": "1960-10-10",
|
||||
"airDateUtc": "1960-10-10T00:00:00Z",
|
||||
"overview": "Andy leads a manhunt for an escaped convict.",
|
||||
"hasFile": true,
|
||||
"monitored": true,
|
||||
"runtime": 25,
|
||||
"id": 1002
|
||||
},
|
||||
{
|
||||
"seriesId": 105,
|
||||
"tvdbId": 123458,
|
||||
"episodeFileId": 0,
|
||||
"seasonNumber": 2,
|
||||
"episodeNumber": 1,
|
||||
"title": "Opie and the Bully",
|
||||
"airDate": "1961-10-02",
|
||||
"airDateUtc": "1961-10-02T00:00:00Z",
|
||||
"overview": "Opie is being bullied at school.",
|
||||
"hasFile": false,
|
||||
"monitored": true,
|
||||
"runtime": 25,
|
||||
"finaleType": "season",
|
||||
"id": 1003
|
||||
}
|
||||
]
|
||||
@@ -17,15 +17,18 @@
|
||||
"images": [
|
||||
{
|
||||
"coverType": "fanart",
|
||||
"url": "https://artworks.thetvdb.com/banners/fanart/original/77754-5.jpg"
|
||||
"url": "/MediaCover/17/fanart.jpg?lastWrite=637217160281262470",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/fanart/original/77754-5.jpg"
|
||||
},
|
||||
{
|
||||
"coverType": "banner",
|
||||
"url": "https://artworks.thetvdb.com/banners/graphical/77754-g.jpg"
|
||||
"url": "/MediaCover/17/banner.jpg?lastWrite=637217160301222320",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/graphical/77754-g.jpg"
|
||||
},
|
||||
{
|
||||
"coverType": "poster",
|
||||
"url": "https://artworks.thetvdb.com/banners/posters/77754-4.jpg"
|
||||
"url": "/MediaCover/17/poster.jpg?lastWrite=637217160322182160",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/posters/77754-4.jpg"
|
||||
}
|
||||
],
|
||||
"seasons": [
|
||||
|
||||
@@ -0,0 +1,246 @@
|
||||
{
|
||||
"page": 1,
|
||||
"pageSize": 10,
|
||||
"sortKey": "timeleft",
|
||||
"sortDirection": "ascending",
|
||||
"totalRecords": 3,
|
||||
"records": [
|
||||
{
|
||||
"series": {
|
||||
"title": "House",
|
||||
"sortTitle": "house",
|
||||
"seasonCount": 8,
|
||||
"status": "ended",
|
||||
"overview": "A medical drama.",
|
||||
"network": "FOX",
|
||||
"airTime": "21:00",
|
||||
"images": [
|
||||
{
|
||||
"coverType": "fanart",
|
||||
"url": "/MediaCover/64/fanart.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/fanart/original/73255-11.jpg"
|
||||
},
|
||||
{
|
||||
"coverType": "banner",
|
||||
"url": "/MediaCover/64/banner.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/graphical/73255-g7.jpg"
|
||||
},
|
||||
{
|
||||
"coverType": "poster",
|
||||
"url": "/MediaCover/64/poster.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/series/73255/posters/230801.jpg"
|
||||
}
|
||||
],
|
||||
"year": 2004,
|
||||
"path": "/data/tv/House",
|
||||
"monitored": true,
|
||||
"tvdbId": 73255,
|
||||
"imdbId": "tt0412142",
|
||||
"id": 64
|
||||
},
|
||||
"episode": {
|
||||
"seriesId": 64,
|
||||
"episodeFileId": 0,
|
||||
"seasonNumber": 2,
|
||||
"episodeNumber": 1,
|
||||
"title": "Acceptance",
|
||||
"airDate": "2005-09-13",
|
||||
"airDateUtc": "2005-09-14T01:00:00Z",
|
||||
"overview": "A death row inmate is felled by an unknown disease.",
|
||||
"hasFile": false,
|
||||
"monitored": true,
|
||||
"absoluteEpisodeNumber": 24,
|
||||
"unverifiedSceneNumbering": false,
|
||||
"id": 2303
|
||||
},
|
||||
"quality": {
|
||||
"quality": {
|
||||
"id": 7,
|
||||
"name": "Bluray-1080p"
|
||||
},
|
||||
"revision": {
|
||||
"version": 1,
|
||||
"real": 0
|
||||
}
|
||||
},
|
||||
"size": 84429221268,
|
||||
"title": "House.S02.1080p.BluRay.x264-SHORTBREHD",
|
||||
"sizeleft": 83819785620,
|
||||
"timeleft": "00:00:00",
|
||||
"estimatedCompletionTime": "2026-02-05T22:46:52.440104Z",
|
||||
"status": "paused",
|
||||
"trackedDownloadStatus": "ok",
|
||||
"trackedDownloadState": "downloading",
|
||||
"statusMessages": [],
|
||||
"downloadId": "CA4552774085F1B5DB3C8E7D39DD220B0474FE4B",
|
||||
"protocol": "torrent",
|
||||
"downloadClient": "qBittorrent",
|
||||
"indexer": "LST (Prowlarr)",
|
||||
"episodeHasFile": false,
|
||||
"languages": [{ "id": 1, "name": "English" }],
|
||||
"customFormatScore": 0,
|
||||
"seriesId": 64,
|
||||
"episodeId": 2303,
|
||||
"seasonNumber": 2,
|
||||
"id": 1462284976
|
||||
},
|
||||
{
|
||||
"series": {
|
||||
"title": "House",
|
||||
"sortTitle": "house",
|
||||
"seasonCount": 8,
|
||||
"status": "ended",
|
||||
"overview": "A medical drama.",
|
||||
"network": "FOX",
|
||||
"airTime": "21:00",
|
||||
"images": [
|
||||
{
|
||||
"coverType": "fanart",
|
||||
"url": "/MediaCover/64/fanart.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/fanart/original/73255-11.jpg"
|
||||
},
|
||||
{
|
||||
"coverType": "banner",
|
||||
"url": "/MediaCover/64/banner.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/graphical/73255-g7.jpg"
|
||||
},
|
||||
{
|
||||
"coverType": "poster",
|
||||
"url": "/MediaCover/64/poster.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/series/73255/posters/230801.jpg"
|
||||
}
|
||||
],
|
||||
"year": 2004,
|
||||
"path": "/data/tv/House",
|
||||
"monitored": true,
|
||||
"tvdbId": 73255,
|
||||
"imdbId": "tt0412142",
|
||||
"id": 64
|
||||
},
|
||||
"episode": {
|
||||
"seriesId": 64,
|
||||
"episodeFileId": 0,
|
||||
"seasonNumber": 2,
|
||||
"episodeNumber": 2,
|
||||
"title": "Autopsy",
|
||||
"airDate": "2005-09-20",
|
||||
"airDateUtc": "2005-09-21T01:00:00Z",
|
||||
"overview": "Dr. Wilson convinces House to take a case.",
|
||||
"hasFile": false,
|
||||
"monitored": true,
|
||||
"absoluteEpisodeNumber": 25,
|
||||
"unverifiedSceneNumbering": false,
|
||||
"id": 2304
|
||||
},
|
||||
"quality": {
|
||||
"quality": {
|
||||
"id": 7,
|
||||
"name": "Bluray-1080p"
|
||||
},
|
||||
"revision": {
|
||||
"version": 1,
|
||||
"real": 0
|
||||
}
|
||||
},
|
||||
"size": 84429221268,
|
||||
"title": "House.S02.1080p.BluRay.x264-SHORTBREHD",
|
||||
"sizeleft": 83819785620,
|
||||
"timeleft": "00:00:00",
|
||||
"estimatedCompletionTime": "2026-02-05T22:46:52.440104Z",
|
||||
"status": "paused",
|
||||
"trackedDownloadStatus": "ok",
|
||||
"trackedDownloadState": "downloading",
|
||||
"statusMessages": [],
|
||||
"downloadId": "CA4552774085F1B5DB3C8E7D39DD220B0474FE4B",
|
||||
"protocol": "torrent",
|
||||
"downloadClient": "qBittorrent",
|
||||
"indexer": "LST (Prowlarr)",
|
||||
"episodeHasFile": false,
|
||||
"languages": [{ "id": 1, "name": "English" }],
|
||||
"customFormatScore": 0,
|
||||
"seriesId": 64,
|
||||
"episodeId": 2304,
|
||||
"seasonNumber": 2,
|
||||
"id": 1566152913
|
||||
},
|
||||
{
|
||||
"series": {
|
||||
"title": "House",
|
||||
"sortTitle": "house",
|
||||
"seasonCount": 8,
|
||||
"status": "ended",
|
||||
"overview": "A medical drama.",
|
||||
"network": "FOX",
|
||||
"airTime": "21:00",
|
||||
"images": [
|
||||
{
|
||||
"coverType": "fanart",
|
||||
"url": "/MediaCover/64/fanart.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/fanart/original/73255-11.jpg"
|
||||
},
|
||||
{
|
||||
"coverType": "banner",
|
||||
"url": "/MediaCover/64/banner.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/graphical/73255-g7.jpg"
|
||||
},
|
||||
{
|
||||
"coverType": "poster",
|
||||
"url": "/MediaCover/64/poster.jpg",
|
||||
"remoteUrl": "https://artworks.thetvdb.com/banners/series/73255/posters/230801.jpg"
|
||||
}
|
||||
],
|
||||
"year": 2004,
|
||||
"path": "/data/tv/House",
|
||||
"monitored": true,
|
||||
"tvdbId": 73255,
|
||||
"imdbId": "tt0412142",
|
||||
"id": 64
|
||||
},
|
||||
"episode": {
|
||||
"seriesId": 64,
|
||||
"episodeFileId": 0,
|
||||
"seasonNumber": 2,
|
||||
"episodeNumber": 24,
|
||||
"title": "No Reason",
|
||||
"airDate": "2006-05-23",
|
||||
"airDateUtc": "2006-05-24T01:00:00Z",
|
||||
"overview": "House finds himself in a fight for his life.",
|
||||
"hasFile": false,
|
||||
"monitored": true,
|
||||
"absoluteEpisodeNumber": 47,
|
||||
"unverifiedSceneNumbering": false,
|
||||
"id": 2326
|
||||
},
|
||||
"quality": {
|
||||
"quality": {
|
||||
"id": 7,
|
||||
"name": "Bluray-1080p"
|
||||
},
|
||||
"revision": {
|
||||
"version": 1,
|
||||
"real": 0
|
||||
}
|
||||
},
|
||||
"size": 84429221268,
|
||||
"title": "House.S02.1080p.BluRay.x264-SHORTBREHD",
|
||||
"sizeleft": 83819785620,
|
||||
"timeleft": "00:00:00",
|
||||
"estimatedCompletionTime": "2026-02-05T22:46:52.440104Z",
|
||||
"status": "paused",
|
||||
"trackedDownloadStatus": "ok",
|
||||
"trackedDownloadState": "downloading",
|
||||
"statusMessages": [],
|
||||
"downloadId": "CA4552774085F1B5DB3C8E7D39DD220B0474FE4B",
|
||||
"protocol": "torrent",
|
||||
"downloadClient": "qBittorrent",
|
||||
"indexer": "LST (Prowlarr)",
|
||||
"episodeHasFile": false,
|
||||
"languages": [{ "id": 1, "name": "English" }],
|
||||
"customFormatScore": 0,
|
||||
"seriesId": 64,
|
||||
"episodeId": 2326,
|
||||
"seasonNumber": 2,
|
||||
"id": 1634887132
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,216 @@
|
||||
# serializer version: 1
|
||||
# name: test_service_get_diskspace
|
||||
dict({
|
||||
'disks': dict({
|
||||
'C:\\': dict({
|
||||
'free_space': 282500067328.0,
|
||||
'label': '',
|
||||
'path': 'C:\\',
|
||||
'total_space': 499738734592.0,
|
||||
'unit': 'bytes',
|
||||
}),
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_service_get_episodes
|
||||
dict({
|
||||
'episodes': dict({
|
||||
'S01E01': dict({
|
||||
'air_date': '1960-10-03 00:00:00',
|
||||
'air_date_utc': '1960-10-03 00:00:00+00:00',
|
||||
'episode_file_id': 0,
|
||||
'episode_identifier': 'S01E01',
|
||||
'episode_number': 1,
|
||||
'has_file': False,
|
||||
'id': 1001,
|
||||
'monitored': True,
|
||||
'overview': "Andy's housekeeper quits, and a new one arrives.",
|
||||
'runtime': 25,
|
||||
'season_number': 1,
|
||||
'series_id': 105,
|
||||
'title': 'The New Housekeeper',
|
||||
'tvdb_id': 123456,
|
||||
}),
|
||||
'S01E02': dict({
|
||||
'air_date': '1960-10-10 00:00:00',
|
||||
'air_date_utc': '1960-10-10 00:00:00+00:00',
|
||||
'episode_file_id': 5001,
|
||||
'episode_identifier': 'S01E02',
|
||||
'episode_number': 2,
|
||||
'has_file': True,
|
||||
'id': 1002,
|
||||
'monitored': True,
|
||||
'overview': 'Andy leads a manhunt for an escaped convict.',
|
||||
'runtime': 25,
|
||||
'season_number': 1,
|
||||
'series_id': 105,
|
||||
'title': 'The Manhunt',
|
||||
'tvdb_id': 123457,
|
||||
}),
|
||||
'S02E01': dict({
|
||||
'air_date': '1961-10-02 00:00:00',
|
||||
'air_date_utc': '1961-10-02 00:00:00+00:00',
|
||||
'episode_file_id': 0,
|
||||
'episode_identifier': 'S02E01',
|
||||
'episode_number': 1,
|
||||
'finale_type': 'season',
|
||||
'has_file': False,
|
||||
'id': 1003,
|
||||
'monitored': True,
|
||||
'overview': 'Opie is being bullied at school.',
|
||||
'runtime': 25,
|
||||
'season_number': 2,
|
||||
'series_id': 105,
|
||||
'title': 'Opie and the Bully',
|
||||
'tvdb_id': 123458,
|
||||
}),
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_service_get_queue
|
||||
dict({
|
||||
'shows': dict({
|
||||
'The.Andy.Griffith.Show.S01E01.x264-GROUP': dict({
|
||||
'download_client': None,
|
||||
'download_id': 'SABnzbd_nzo_Mq2f_b',
|
||||
'download_title': 'The.Andy.Griffith.Show.S01E01.x264-GROUP',
|
||||
'episode_has_file': None,
|
||||
'episode_id': None,
|
||||
'episode_number': 1,
|
||||
'episode_title': 'The New Housekeeper',
|
||||
'estimated_completion_time': '2016-02-05 22:46:52.440104',
|
||||
'id': 1503378561,
|
||||
'images': dict({
|
||||
'banner': 'https://artworks.thetvdb.com/banners/graphical/77754-g.jpg',
|
||||
'fanart': 'https://artworks.thetvdb.com/banners/fanart/original/77754-5.jpg',
|
||||
'poster': 'https://artworks.thetvdb.com/banners/posters/77754-4.jpg',
|
||||
}),
|
||||
'indexer': None,
|
||||
'progress': '100.00%',
|
||||
'protocol': 'ProtocolType.USENET',
|
||||
'quality': 'SD',
|
||||
'season_number': None,
|
||||
'series_id': None,
|
||||
'size': 4472186820,
|
||||
'size_left': 0,
|
||||
'status': 'Downloading',
|
||||
'time_left': '00:00:00',
|
||||
'title': 'The Andy Griffith Show',
|
||||
'tracked_download_state': 'downloading',
|
||||
'tracked_download_status': 'Ok',
|
||||
}),
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_service_get_series
|
||||
dict({
|
||||
'shows': dict({
|
||||
'The Andy Griffith Show': dict({
|
||||
'episode_count': 0,
|
||||
'episode_file_count': 0,
|
||||
'episodes_info': '0/0 Episodes',
|
||||
'id': 105,
|
||||
'images': dict({
|
||||
'banner': 'https://artworks.thetvdb.com/banners/graphical/77754-g.jpg',
|
||||
'fanart': 'https://artworks.thetvdb.com/banners/fanart/original/77754-5.jpg',
|
||||
'poster': 'https://artworks.thetvdb.com/banners/posters/77754-1.jpg',
|
||||
}),
|
||||
'imdb_id': 'tt0053479',
|
||||
'monitored': True,
|
||||
'status': 'ended',
|
||||
'tvdb_id': 77754,
|
||||
'year': 1960,
|
||||
}),
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_service_get_upcoming
|
||||
dict({
|
||||
'episodes': dict({
|
||||
"Bob's Burgers S04E11": dict({
|
||||
'air_date': '2014-01-26 00:00:00',
|
||||
'air_date_utc': '2014-01-27 01:30:00+00:00',
|
||||
'episode_identifier': 'S04E11',
|
||||
'episode_number': 11,
|
||||
'finale_type': None,
|
||||
'has_file': False,
|
||||
'id': 14402,
|
||||
'images': dict({
|
||||
'banner': 'http://192.168.1.189:8989http://slurm.trakt.us/images/banners/1387.6.jpg',
|
||||
'fanart': 'http://192.168.1.189:8989http://slurm.trakt.us/images/fanart/1387.6.jpg',
|
||||
'poster': 'http://192.168.1.189:8989http://slurm.trakt.us/images/posters/1387.6-300.jpg',
|
||||
}),
|
||||
'monitored': True,
|
||||
'network': 'FOX',
|
||||
'overview': 'To compete with fellow "restaurateur," Jimmy Pesto, and his blowout Super Bowl event, Bob is determined to create a Bob\'s Burgers commercial to air during the "big game." In an effort to outshine Pesto, the Belchers recruit Randy, a documentarian, to assist with the filmmaking and hire on former pro football star Connie Frye to be the celebrity endorser.',
|
||||
'runtime': None,
|
||||
'season_number': 4,
|
||||
'series_id': 3,
|
||||
'series_imdb_id': 'tt1561755',
|
||||
'series_status': 'continuing',
|
||||
'series_title': "Bob's Burgers",
|
||||
'series_tvdb_id': 194031,
|
||||
'series_year': 2011,
|
||||
'title': 'Easy Com-mercial, Easy Go-mercial',
|
||||
}),
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_service_get_wanted
|
||||
dict({
|
||||
'episodes': dict({
|
||||
"Bob's Burgers S04E11": dict({
|
||||
'air_date': '2014-01-26 00:00:00',
|
||||
'air_date_utc': '2014-01-27 01:30:00+00:00',
|
||||
'episode_identifier': 'S04E11',
|
||||
'episode_number': 11,
|
||||
'has_file': False,
|
||||
'id': 14402,
|
||||
'images': dict({
|
||||
'banner': 'http://192.168.1.189:8989http://slurm.trakt.us/images/banners/1387.6.jpg',
|
||||
'fanart': 'http://192.168.1.189:8989http://slurm.trakt.us/images/fanart/1387.6.jpg',
|
||||
'poster': 'http://192.168.1.189:8989http://slurm.trakt.us/images/posters/1387.6-300.jpg',
|
||||
}),
|
||||
'monitored': True,
|
||||
'network': 'FOX',
|
||||
'overview': 'To compete with fellow "restaurateur," Jimmy Pesto, and his blowout Super Bowl event, Bob is determined to create a Bob\'s Burgers commercial to air during the "big game." In an effort to outshine Pesto, the Belchers recruit Randy, a documentarian, to assist with the filmmaking and hire on former pro football star Connie Frye to be the celebrity endorser.',
|
||||
'runtime': None,
|
||||
'season_number': 4,
|
||||
'series_id': 3,
|
||||
'series_imdb_id': 'tt1561755',
|
||||
'series_status': 'continuing',
|
||||
'series_title': "Bob's Burgers",
|
||||
'series_tvdb_id': 194031,
|
||||
'series_year': 2011,
|
||||
'title': 'Easy Com-mercial, Easy Go-mercial',
|
||||
'tvdb_id': None,
|
||||
}),
|
||||
'The Andy Griffith Show S01E01': dict({
|
||||
'air_date': '1960-10-03 00:00:00',
|
||||
'air_date_utc': '1960-10-03 01:00:00+00:00',
|
||||
'episode_identifier': 'S01E01',
|
||||
'episode_number': 1,
|
||||
'has_file': False,
|
||||
'id': 889,
|
||||
'images': dict({
|
||||
'banner': 'http://192.168.1.189:8989https://artworks.thetvdb.com/banners/graphical/77754-g.jpg',
|
||||
'fanart': 'http://192.168.1.189:8989https://artworks.thetvdb.com/banners/fanart/original/77754-5.jpg',
|
||||
'poster': 'http://192.168.1.189:8989https://artworks.thetvdb.com/banners/posters/77754-4.jpg',
|
||||
}),
|
||||
'monitored': True,
|
||||
'network': 'CBS',
|
||||
'overview': "Sheriff Andy Taylor and his young son Opie are in need of a new housekeeper. Andy's Aunt Bee looks like the perfect candidate and moves in, but her presence causes friction with Opie.",
|
||||
'runtime': None,
|
||||
'season_number': 1,
|
||||
'series_id': 17,
|
||||
'series_imdb_id': '',
|
||||
'series_status': 'ended',
|
||||
'series_title': 'The Andy Griffith Show',
|
||||
'series_tvdb_id': 77754,
|
||||
'series_year': 1960,
|
||||
'title': 'The New Housekeeper',
|
||||
'tvdb_id': None,
|
||||
}),
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
@@ -70,16 +70,11 @@ async def test_unload_config_entry(
|
||||
"""Test the configuration entry unloading."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.sonarr.sensor.async_setup_entry",
|
||||
return_value=True,
|
||||
):
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.data[DOMAIN]
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
assert mock_config_entry.entry_id in hass.data[DOMAIN]
|
||||
assert hass.data[DOMAIN][mock_config_entry.entry_id] is not None
|
||||
|
||||
await hass.config_entries.async_unload(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -55,35 +55,26 @@ async def test_sensors(
|
||||
state = hass.states.get("sensor.sonarr_disk_space")
|
||||
assert state
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfInformation.GIGABYTES
|
||||
assert state.attributes.get("C:\\") == "263.10/465.42GB (56.53%)"
|
||||
assert state.state == "263.10"
|
||||
|
||||
state = hass.states.get("sensor.sonarr_queue")
|
||||
assert state
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "episodes"
|
||||
assert state.attributes.get("The Andy Griffith Show S01E01") == "100.00%"
|
||||
assert state.state == "1"
|
||||
|
||||
state = hass.states.get("sensor.sonarr_shows")
|
||||
assert state
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "series"
|
||||
assert state.attributes.get("The Andy Griffith Show") == "0/0 Episodes"
|
||||
assert state.state == "1"
|
||||
|
||||
state = hass.states.get("sensor.sonarr_upcoming")
|
||||
assert state
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "episodes"
|
||||
assert state.attributes.get("Bob's Burgers") == "S04E11"
|
||||
assert state.state == "1"
|
||||
|
||||
state = hass.states.get("sensor.sonarr_wanted")
|
||||
assert state
|
||||
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "episodes"
|
||||
assert state.attributes.get("Bob's Burgers S04E11") == "2014-01-26T17:30:00-08:00"
|
||||
assert (
|
||||
state.attributes.get("The Andy Griffith Show S01E01")
|
||||
== "1960-10-02T17:00:00-08:00"
|
||||
)
|
||||
assert state.state == "2"
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,620 @@
|
||||
"""Tests for Sonarr services."""
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from aiopyarr import (
|
||||
ArrAuthenticationException,
|
||||
ArrConnectionException,
|
||||
Diskspace,
|
||||
SonarrQueue,
|
||||
)
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.sonarr.const import (
|
||||
ATTR_DISKS,
|
||||
ATTR_ENTRY_ID,
|
||||
ATTR_EPISODES,
|
||||
ATTR_SHOWS,
|
||||
DOMAIN,
|
||||
SERVICE_GET_DISKSPACE,
|
||||
SERVICE_GET_EPISODES,
|
||||
SERVICE_GET_QUEUE,
|
||||
SERVICE_GET_SERIES,
|
||||
SERVICE_GET_UPCOMING,
|
||||
SERVICE_GET_WANTED,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"service",
|
||||
[
|
||||
SERVICE_GET_SERIES,
|
||||
SERVICE_GET_QUEUE,
|
||||
SERVICE_GET_DISKSPACE,
|
||||
SERVICE_GET_UPCOMING,
|
||||
SERVICE_GET_WANTED,
|
||||
],
|
||||
)
|
||||
async def test_services_config_entry_not_loaded_state(
|
||||
hass: HomeAssistant,
|
||||
init_integration: MockConfigEntry,
|
||||
service: str,
|
||||
) -> None:
|
||||
"""Test service call when config entry is in failed state."""
|
||||
# Create a second config entry that's not loaded
|
||||
unloaded_entry = MockConfigEntry(
|
||||
title="Sonarr",
|
||||
domain=DOMAIN,
|
||||
unique_id="unloaded",
|
||||
)
|
||||
unloaded_entry.add_to_hass(hass)
|
||||
|
||||
assert unloaded_entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
with pytest.raises(ServiceValidationError) as exc_info:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
service,
|
||||
{ATTR_ENTRY_ID: unloaded_entry.entry_id},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
)
|
||||
|
||||
assert exc_info.value.translation_key == "not_loaded"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"service",
|
||||
[
|
||||
SERVICE_GET_SERIES,
|
||||
SERVICE_GET_QUEUE,
|
||||
SERVICE_GET_DISKSPACE,
|
||||
SERVICE_GET_UPCOMING,
|
||||
SERVICE_GET_WANTED,
|
||||
],
|
||||
)
|
||||
async def test_services_integration_not_found(
|
||||
hass: HomeAssistant,
|
||||
init_integration: MockConfigEntry,
|
||||
service: str,
|
||||
) -> None:
|
||||
"""Test service call with non-existent config entry."""
|
||||
with pytest.raises(ServiceValidationError) as exc_info:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
service,
|
||||
{ATTR_ENTRY_ID: "non_existent_entry_id"},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
)
|
||||
|
||||
assert exc_info.value.translation_key == "integration_not_found"
|
||||
|
||||
|
||||
async def test_service_get_series(
|
||||
hass: HomeAssistant,
|
||||
init_integration: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test get_series service."""
|
||||
response = await hass.services.async_call(
|
||||
DOMAIN,
|
||||
SERVICE_GET_SERIES,
|
||||
{ATTR_ENTRY_ID: init_integration.entry_id},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
)
|
||||
|
||||
# Explicit assertion for specific behavior
|
||||
assert len(response[ATTR_SHOWS]) == 1
|
||||
|
||||
# Snapshot for full structure validation
|
||||
assert response == snapshot
|
||||
|
||||
|
||||
async def test_service_get_queue(
|
||||
hass: HomeAssistant,
|
||||
init_integration: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test get_queue service."""
|
||||
response = await hass.services.async_call(
|
||||
DOMAIN,
|
||||
SERVICE_GET_QUEUE,
|
||||
{ATTR_ENTRY_ID: init_integration.entry_id},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
)
|
||||
|
||||
# Explicit assertion for specific behavior
|
||||
assert len(response[ATTR_SHOWS]) == 1
|
||||
|
||||
# Snapshot for full structure validation
|
||||
assert response == snapshot
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"service",
|
||||
[
|
||||
SERVICE_GET_SERIES,
|
||||
SERVICE_GET_QUEUE,
|
||||
SERVICE_GET_DISKSPACE,
|
||||
SERVICE_GET_UPCOMING,
|
||||
SERVICE_GET_WANTED,
|
||||
],
|
||||
)
|
||||
async def test_services_entry_not_loaded(
|
||||
hass: HomeAssistant,
|
||||
init_integration: MockConfigEntry,
|
||||
service: str,
|
||||
) -> None:
|
||||
"""Test services with unloaded config entry."""
|
||||
# Unload the entry
|
||||
await hass.config_entries.async_unload(init_integration.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
with pytest.raises(ServiceValidationError) as exc_info:
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
service,
|
||||
{ATTR_ENTRY_ID: init_integration.entry_id},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
)
|
||||
|
||||
assert exc_info.value.translation_key == "not_loaded"
|
||||
|
||||
|
||||
async def test_service_get_queue_empty(
|
||||
hass: HomeAssistant,
|
||||
init_integration: MockConfigEntry,
|
||||
mock_sonarr: MagicMock,
|
||||
) -> None:
|
||||
"""Test get_queue service with empty queue."""
|
||||
# Mock empty queue response
|
||||
mock_sonarr.async_get_queue.return_value = SonarrQueue(
|
||||
{
|
||||
"page": 1,
|
||||
"pageSize": 10,
|
||||
"sortKey": "timeleft",
|
||||
"sortDirection": "ascending",
|
||||
"totalRecords": 0,
|
||||
"records": [],
|
||||
}
|
||||
)
|
||||
|
||||
response = await hass.services.async_call(
|
||||
DOMAIN,
|
||||
SERVICE_GET_QUEUE,
|
||||
{ATTR_ENTRY_ID: init_integration.entry_id},
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
)
|
||||
|
||||
assert response is not None
|
||||
assert ATTR_SHOWS in response
|
||||
shows = response[ATTR_SHOWS]
|
||||
assert isinstance(shows, dict)
|
||||
assert len(shows) == 0
|
||||
|
||||
|
||||
async def test_service_get_diskspace(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test the get_diskspace service against the default fixture."""
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_DISKSPACE,
        {ATTR_ENTRY_ID: init_integration.entry_id},
        blocking=True,
        return_response=True,
    )

    # The default fixture exposes exactly one disk.
    assert len(result[ATTR_DISKS]) == 1

    # The full response shape is pinned by the snapshot.
    assert result == snapshot
|
||||
|
||||
|
||||
async def test_service_get_diskspace_multiple_drives(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    mock_sonarr: MagicMock,
) -> None:
    """Test get_diskspace service with multiple drives.

    Covers Windows drive letters, a Windows subfolder mount, and a POSIX
    mount point to verify the response is keyed by the raw disk path.
    """
    # Mock multiple disks response
    mock_sonarr.async_get_diskspace.return_value = [
        Diskspace(
            {
                "path": "C:\\",
                "label": "System",
                "freeSpace": 100000000000,
                "totalSpace": 500000000000,
            }
        ),
        Diskspace(
            {
                "path": "D:\\Media",
                "label": "Media Storage",
                "freeSpace": 2000000000000,
                "totalSpace": 4000000000000,
            }
        ),
        Diskspace(
            {
                "path": "/mnt/nas",
                "label": "NAS",
                "freeSpace": 10000000000000,
                "totalSpace": 20000000000000,
            }
        ),
    ]

    response = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_DISKSPACE,
        {ATTR_ENTRY_ID: init_integration.entry_id},
        blocking=True,
        return_response=True,
    )

    assert response is not None
    assert ATTR_DISKS in response
    disks = response[ATTR_DISKS]
    # Disks are returned as a mapping keyed by path, one entry per drive.
    assert isinstance(disks, dict)
    assert len(disks) == 3

    # Check first disk (C:\)
    c_drive = disks["C:\\"]
    assert c_drive["path"] == "C:\\"
    assert c_drive["label"] == "System"
    assert c_drive["free_space"] == 100000000000
    assert c_drive["total_space"] == 500000000000
    # Space values are reported in raw bytes.
    assert c_drive["unit"] == "bytes"

    # Check second disk (D:\Media)
    d_drive = disks["D:\\Media"]
    assert d_drive["path"] == "D:\\Media"
    assert d_drive["label"] == "Media Storage"
    assert d_drive["free_space"] == 2000000000000
    assert d_drive["total_space"] == 4000000000000

    # Check third disk (/mnt/nas)
    nas = disks["/mnt/nas"]
    assert nas["path"] == "/mnt/nas"
    assert nas["label"] == "NAS"
    assert nas["free_space"] == 10000000000000
    assert nas["total_space"] == 20000000000000
|
||||
|
||||
|
||||
async def test_service_get_upcoming(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test the get_upcoming service against the default fixture."""
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_UPCOMING,
        {ATTR_ENTRY_ID: init_integration.entry_id},
        blocking=True,
        return_response=True,
    )

    # The default calendar fixture yields a single upcoming episode.
    assert len(result[ATTR_EPISODES]) == 1

    # The full response shape is pinned by the snapshot.
    assert result == snapshot
|
||||
|
||||
|
||||
async def test_service_get_wanted(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test the get_wanted service against the default fixture."""
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_WANTED,
        {ATTR_ENTRY_ID: init_integration.entry_id},
        blocking=True,
        return_response=True,
    )

    # The default wanted fixture yields two missing episodes.
    assert len(result[ATTR_EPISODES]) == 2

    # The full response shape is pinned by the snapshot.
    assert result == snapshot
|
||||
|
||||
|
||||
async def test_service_get_episodes(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test the get_episodes service for a single series."""
    call_data = {ATTR_ENTRY_ID: init_integration.entry_id, "series_id": 105}
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_EPISODES,
        call_data,
        blocking=True,
        return_response=True,
    )

    # Series 105 in the fixture has three episodes in total.
    assert len(result[ATTR_EPISODES]) == 3

    # The full response shape is pinned by the snapshot.
    assert result == snapshot
|
||||
|
||||
|
||||
async def test_service_get_episodes_with_season_filter(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
) -> None:
    """Test that get_episodes limits the result to the requested season."""
    call_data = {
        ATTR_ENTRY_ID: init_integration.entry_id,
        "series_id": 105,
        "season_number": 1,
    }
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_EPISODES,
        call_data,
        blocking=True,
        return_response=True,
    )

    assert result is not None
    assert ATTR_EPISODES in result
    episodes = result[ATTR_EPISODES]
    assert isinstance(episodes, dict)
    # Only the two season-1 episodes survive the filter; season 2 is excluded.
    assert len(episodes) == 2
    assert "S01E01" in episodes
    assert "S01E02" in episodes
    assert "S02E01" not in episodes
|
||||
|
||||
|
||||
async def test_service_get_queue_image_fallback(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    mock_sonarr: MagicMock,
) -> None:
    """Test that get_queue uses url fallback when remoteUrl is not available.

    Sonarr image entries may carry only a relative ``url`` (no ``remoteUrl``);
    the service is expected to prefix such paths with the instance base URL.
    """
    # Mock queue response with images that only have 'url' (no 'remoteUrl')
    mock_sonarr.async_get_queue.return_value = SonarrQueue(
        {
            "page": 1,
            "pageSize": 10,
            "sortKey": "timeleft",
            "sortDirection": "ascending",
            "totalRecords": 1,
            "records": [
                {
                    # Minimal but complete series payload; the interesting
                    # part for this test is the relative-only "images" list.
                    "series": {
                        "title": "Test Series",
                        "sortTitle": "test series",
                        "seasonCount": 1,
                        "status": "continuing",
                        "overview": "A test series.",
                        "network": "Test Network",
                        "airTime": "20:00",
                        "images": [
                            {
                                "coverType": "fanart",
                                "url": "/MediaCover/1/fanart.jpg?lastWrite=123456",
                            },
                            {
                                "coverType": "poster",
                                "url": "/MediaCover/1/poster.jpg?lastWrite=123456",
                            },
                        ],
                        "seasons": [{"seasonNumber": 1, "monitored": True}],
                        "year": 2024,
                        "path": "/tv/Test Series",
                        "profileId": 1,
                        "seasonFolder": True,
                        "monitored": True,
                        "useSceneNumbering": False,
                        "runtime": 45,
                        "tvdbId": 12345,
                        "tvRageId": 0,
                        "tvMazeId": 0,
                        "firstAired": "2024-01-01T00:00:00Z",
                        "lastInfoSync": "2024-01-01T00:00:00Z",
                        "seriesType": "standard",
                        "cleanTitle": "testseries",
                        "imdbId": "tt1234567",
                        "titleSlug": "test-series",
                        "certification": "TV-14",
                        "genres": ["Drama"],
                        "tags": [],
                        "added": "2024-01-01T00:00:00Z",
                        "ratings": {"votes": 100, "value": 8.0},
                        "qualityProfileId": 1,
                        "id": 1,
                    },
                    "episode": {
                        "seriesId": 1,
                        "episodeFileId": 0,
                        "seasonNumber": 1,
                        "episodeNumber": 1,
                        "title": "Pilot",
                        "airDate": "2024-01-01",
                        "airDateUtc": "2024-01-01T00:00:00Z",
                        "overview": "The pilot episode.",
                        "hasFile": False,
                        "monitored": True,
                        "absoluteEpisodeNumber": 1,
                        "unverifiedSceneNumbering": False,
                        "id": 1,
                    },
                    "quality": {
                        "quality": {"id": 3, "name": "WEBDL-1080p"},
                        "revision": {"version": 1, "real": 0},
                    },
                    "size": 1000000000,
                    "title": "Test.Series.S01E01.1080p.WEB-DL",
                    "sizeleft": 500000000,
                    "timeleft": "00:10:00",
                    "estimatedCompletionTime": "2024-01-01T01:00:00Z",
                    "status": "Downloading",
                    "trackedDownloadStatus": "Ok",
                    "statusMessages": [],
                    "downloadId": "test123",
                    "protocol": "torrent",
                    "id": 1,
                }
            ],
        }
    )

    response = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_QUEUE,
        {ATTR_ENTRY_ID: init_integration.entry_id},
        blocking=True,
        return_response=True,
    )

    assert response is not None
    assert ATTR_SHOWS in response
    shows = response[ATTR_SHOWS]
    assert len(shows) == 1

    # Queue entries are keyed by the download title.
    queue_item = shows["Test.Series.S01E01.1080p.WEB-DL"]
    assert "images" in queue_item

    # Since remoteUrl is not available, the fallback should use base_url + url
    # The base_url from mock_config_entry is http://192.168.1.189:8989
    assert "fanart" in queue_item["images"]
    assert "poster" in queue_item["images"]
    # Check that the fallback constructed the URL with base_url prefix
    assert queue_item["images"]["fanart"] == (
        "http://192.168.1.189:8989/MediaCover/1/fanart.jpg?lastWrite=123456"
    )
    assert queue_item["images"]["poster"] == (
        "http://192.168.1.189:8989/MediaCover/1/poster.jpg?lastWrite=123456"
    )
|
||||
|
||||
|
||||
async def test_service_get_queue_season_pack(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_sonarr_season_pack: MagicMock,
) -> None:
    """Test that get_queue collapses a season pack into one queue entry."""
    # Load the integration backed by the season-pack queue fixture.
    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_GET_QUEUE,
        {ATTR_ENTRY_ID: mock_config_entry.entry_id},
        blocking=True,
        return_response=True,
    )

    assert result is not None
    assert ATTR_SHOWS in result
    queue = result[ATTR_SHOWS]

    # The per-episode records are merged into a single season-pack entry
    # rather than one entry per episode.
    assert len(queue) == 1

    pack = queue["House.S02.1080p.BluRay.x264-SHORTBREHD"]

    # Basic identification of the pack.
    assert pack["title"] == "House"
    assert pack["season_number"] == 2
    assert pack["download_title"] == "House.S02.1080p.BluRay.x264-SHORTBREHD"

    # Season-pack specific metadata.
    assert pack["is_season_pack"] is True
    assert pack["episode_count"] == 3  # Episodes 1, 2, and 24 in fixture
    assert pack["episode_range"] == "E01-E24"
    assert pack["episode_identifier"] == "S02 (3 episodes)"

    # Generic download fields are still populated.
    assert pack["size"] == 84429221268
    assert pack["status"] == "paused"
    assert pack["quality"] == "Bluray-1080p"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("service", "method"),
    [
        (SERVICE_GET_SERIES, "async_get_series"),
        (SERVICE_GET_QUEUE, "async_get_queue"),
        (SERVICE_GET_DISKSPACE, "async_get_diskspace"),
        (SERVICE_GET_UPCOMING, "async_get_calendar"),
        (SERVICE_GET_WANTED, "async_get_wanted"),
    ],
)
async def test_services_api_connection_error(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    mock_sonarr: MagicMock,
    service: str,
    method: str,
) -> None:
    """Test that each service surfaces an API connection error."""
    # Make the backing client call for this service raise on invocation.
    client_call = getattr(mock_sonarr, method)
    client_call.side_effect = ArrConnectionException("Connection failed")

    with pytest.raises(HomeAssistantError, match="Failed to connect to Sonarr"):
        await hass.services.async_call(
            DOMAIN,
            service,
            {ATTR_ENTRY_ID: init_integration.entry_id},
            blocking=True,
            return_response=True,
        )
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("service", "method"),
    [
        (SERVICE_GET_SERIES, "async_get_series"),
        (SERVICE_GET_QUEUE, "async_get_queue"),
        (SERVICE_GET_DISKSPACE, "async_get_diskspace"),
        (SERVICE_GET_UPCOMING, "async_get_calendar"),
        (SERVICE_GET_WANTED, "async_get_wanted"),
    ],
)
async def test_services_api_auth_error(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    mock_sonarr: MagicMock,
    service: str,
    method: str,
) -> None:
    """Test that each service surfaces an API authentication error."""
    # Make the backing client call for this service fail authentication.
    client_call = getattr(mock_sonarr, method)
    client_call.side_effect = ArrAuthenticationException("Authentication failed")

    with pytest.raises(HomeAssistantError, match="Authentication failed for Sonarr"):
        await hass.services.async_call(
            DOMAIN,
            service,
            {ATTR_ENTRY_ID: init_integration.entry_id},
            blocking=True,
            return_response=True,
        )
|
||||
Reference in New Issue
Block a user