From 7ea801eb02269e283cee2f73e3da94659f1d1b99 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Tue, 31 Mar 2026 19:25:20 +0200 Subject: [PATCH] Add dedicated stats coordinator for container monitoring Extract all container stats fetching (CPU, memory) from the main and add-on coordinators into a new HassioStatsDataUpdateCoordinator that polls at 60-second intervals. This creates a clean three-coordinator architecture: - Main coordinator (5min): Core/Supervisor/OS/Host version and info - Add-on coordinator (15min): Add-on list, per-addon detailed info - Stats coordinator (60s): Container stats for Core, Supervisor, and add-ons (only when stats entities are enabled) Introduces HassioStatsEntity as a generic base for all container stats sensors, replacing the per-component stats handling that was spread across HassioCoreEntity, HassioSupervisorEntity and HassioAddonEntity. The add-on coordinator's subscription mechanism is renamed from _container_updates to _addon_info_subscriptions to reflect that it now serves a single purpose: tracking which entities need per-add-on info updates. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- homeassistant/components/hassio/__init__.py | 7 + homeassistant/components/hassio/const.py | 2 + .../components/hassio/coordinator.py | 242 ++++++++++-------- .../components/hassio/diagnostics.py | 10 +- homeassistant/components/hassio/entity.py | 94 ++++--- homeassistant/components/hassio/sensor.py | 103 +++++--- homeassistant/components/hassio/services.py | 2 +- tests/components/hassio/test_init.py | 6 +- 8 files changed, 277 insertions(+), 189 deletions(-) diff --git a/homeassistant/components/hassio/__init__.py b/homeassistant/components/hassio/__init__.py index c8ef1294b65..e21d3ba033e 100644 --- a/homeassistant/components/hassio/__init__.py +++ b/homeassistant/components/hassio/__init__.py @@ -93,10 +93,12 @@ from .const import ( DATA_SUPERVISOR_INFO, DOMAIN, HASSIO_UPDATE_INTERVAL, + STATS_COORDINATOR, ) from .coordinator import ( HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator, + HassioStatsDataUpdateCoordinator, get_addons_info, get_addons_list, get_addons_stats, @@ -474,6 +476,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await addon_coordinator.async_config_entry_first_refresh() hass.data[ADDONS_COORDINATOR] = addon_coordinator + stats_coordinator = HassioStatsDataUpdateCoordinator(hass, entry) + await stats_coordinator.async_config_entry_first_refresh() + hass.data[STATS_COORDINATOR] = stats_coordinator + def deprecated_setup_issue() -> None: os_info = get_os_info(hass) info = get_info(hass) @@ -545,5 +551,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Pop coordinators hass.data.pop(COORDINATOR, None) hass.data.pop(ADDONS_COORDINATOR, None) + hass.data.pop(STATS_COORDINATOR, None) return unload_ok diff --git a/homeassistant/components/hassio/const.py b/homeassistant/components/hassio/const.py index 51c5292cb56..6eb227bef84 100644 --- a/homeassistant/components/hassio/const.py +++ 
b/homeassistant/components/hassio/const.py @@ -79,6 +79,7 @@ STARTUP_COMPLETE = "complete" COORDINATOR = "hassio_coordinator" ADDONS_COORDINATOR = "hassio_addons_coordinator" +STATS_COORDINATOR = "hassio_stats_coordinator" DATA_COMPONENT: HassKey[HassIO] = HassKey(DOMAIN) @@ -97,6 +98,7 @@ DATA_ADDONS_STATS = "hassio_addons_stats" DATA_ADDONS_LIST = "hassio_addons_list" HASSIO_UPDATE_INTERVAL = timedelta(minutes=5) HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=15) +HASSIO_STATS_UPDATE_INTERVAL = timedelta(seconds=60) ATTR_AUTO_UPDATE = "auto_update" ATTR_VERSION = "version" diff --git a/homeassistant/components/hassio/coordinator.py b/homeassistant/components/hassio/coordinator.py index 0c055917fcc..0c2f61284c8 100644 --- a/homeassistant/components/hassio/coordinator.py +++ b/homeassistant/components/hassio/coordinator.py @@ -35,7 +35,6 @@ from .const import ( ATTR_SLUG, ATTR_URL, ATTR_VERSION, - CONTAINER_INFO, CONTAINER_STATS, CORE_CONTAINER, DATA_ADDONS_INFO, @@ -60,6 +59,7 @@ from .const import ( DATA_SUPERVISOR_STATS, DOMAIN, HASSIO_ADDON_UPDATE_INTERVAL, + HASSIO_STATS_UPDATE_INTERVAL, HASSIO_UPDATE_INTERVAL, REQUEST_REFRESH_DELAY, SUPERVISOR_CONTAINER, @@ -319,6 +319,112 @@ def async_remove_devices_from_dev_reg( dev_reg.async_remove_device(dev.id) +class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator): + """Class to retrieve Hass.io container stats.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + _LOGGER, + config_entry=config_entry, + name=DOMAIN, + update_interval=HASSIO_STATS_UPDATE_INTERVAL, + request_refresh_debouncer=Debouncer( + hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False + ), + ) + self.data: dict[str, Any] = {} + self.supervisor_client = get_supervisor_client(hass) + self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict( + lambda: defaultdict(set) + ) + + async def 
_async_update_data(self) -> dict[str, Any]: + """Update stats data via library.""" + try: + await self._fetch_stats() + except SupervisorError as err: + raise UpdateFailed(f"Error on Supervisor API: {err}") from err + + new_data: dict[str, Any] = {} + new_data[DATA_KEY_CORE] = get_core_stats(self.hass) + new_data[DATA_KEY_SUPERVISOR] = get_supervisor_stats(self.hass) + new_data[DATA_KEY_ADDONS] = get_addons_stats(self.hass) + return new_data + + async def _fetch_stats(self) -> None: + """Fetch container stats for subscribed entities.""" + container_updates = self._container_updates + data = self.hass.data + client = self.supervisor_client + + # Fetch core and supervisor stats + updates: dict[str, Awaitable] = {} + if CONTAINER_STATS in container_updates[CORE_CONTAINER]: + updates[DATA_CORE_STATS] = client.homeassistant.stats() + if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]: + updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats() + + if updates: + api_results: list[ResponseData] = await asyncio.gather(*updates.values()) + for key, result in zip(updates, api_results, strict=True): + data[key] = result.to_dict() + + # Fetch addon stats + addons_list = get_addons_list(self.hass) or [] + started_addons = { + addon[ATTR_SLUG] + for addon in addons_list + if addon.get("state") in {AddonState.STARTED, AddonState.STARTUP} + } + + addons_stats: dict[str, Any] = data.setdefault(DATA_ADDONS_STATS, {}) + + # Clean up cache for stopped/removed addons + for slug in addons_stats.keys() - started_addons: + del addons_stats[slug] + + # Fetch stats for addons with subscribed entities + addon_stats_results = dict( + await asyncio.gather( + *[ + self._update_addon_stats(slug) + for slug in started_addons + if CONTAINER_STATS in container_updates[slug] + ] + ) + ) + addons_stats.update(addon_stats_results) + + async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]: + """Update single addon stats.""" + try: + stats = await 
self.supervisor_client.addons.addon_stats(slug) + except SupervisorError as err: + _LOGGER.warning("Could not fetch stats for %s: %s", slug, err) + return (slug, None) + return (slug, stats.to_dict()) + + @callback + def async_enable_container_updates( + self, slug: str, entity_id: str, types: set[str] + ) -> CALLBACK_TYPE: + """Enable stats updates for a container.""" + enabled_updates = self._container_updates[slug] + for key in types: + enabled_updates[key].add(entity_id) + + @callback + def _remove() -> None: + for key in types: + enabled_updates[key].remove(entity_id) + + return _remove + + class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator): """Class to retrieve Hass.io Add-on status.""" @@ -335,19 +441,16 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator): name=DOMAIN, update_interval=HASSIO_ADDON_UPDATE_INTERVAL, # We don't want an immediate refresh since we want to avoid - # fetching the container stats right away and avoid hammering - # the Supervisor API on startup + # hammering the Supervisor API on startup request_refresh_debouncer=Debouncer( hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False ), ) self.hassio = hass.data[DATA_COMPONENT] - self.data = {} + self.data: dict[str, Any] = {} self.entry_id = config_entry.entry_id self.dev_reg = dev_reg - self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict( - lambda: defaultdict(set) - ) + self._addon_info_subscriptions: defaultdict[str, set[str]] = defaultdict(set) self.supervisor_client = get_supervisor_client(hass) self.jobs: SupervisorJobs = None # type: ignore[assignment] @@ -366,7 +469,6 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator): new_data: dict[str, Any] = {} addons_info = get_addons_info(self.hass) or {} - addons_stats = get_addons_stats(self.hass) store_data = get_store(self.hass) addons_list = get_addons_list(self.hass) or [] @@ -381,7 +483,6 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator): 
new_data[DATA_KEY_ADDONS] = { (slug := addon[ATTR_SLUG]): { **addon, - **(addons_stats.get(slug) or {}), ATTR_AUTO_UPDATE: (addons_info.get(slug) or {}).get( ATTR_AUTO_UPDATE, False ), @@ -431,8 +532,6 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator): async def force_data_refresh(self, first_update: bool) -> None: """Force update of the addon info.""" - container_updates = self._container_updates - data = self.hass.data client = self.supervisor_client @@ -445,67 +544,27 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator): data[DATA_SUPERVISOR_INFO]["addons"] = data[DATA_ADDONS_LIST] all_addons = {addon.slug for addon in installed_addons} - started_addons = { - addon.slug - for addon in installed_addons - if addon.state in {AddonState.STARTED, AddonState.STARTUP} - } - # # Update addon info if its the first update or # there is at least one entity that needs the data. - # - # When entities are added they call async_enable_container_updates - # to enable updates for the endpoints they need via - # async_added_to_hass. This ensures that we only update - # the data for the endpoints that are needed to avoid unnecessary - # API calls since otherwise we would fetch stats for all containers - # and throw them away. 
- # - for data_key, update_func, enabled_key, wanted_addons, needs_first_update in ( - ( - DATA_ADDONS_STATS, - self._update_addon_stats, - CONTAINER_STATS, - started_addons, - False, - ), - ( - DATA_ADDONS_INFO, - self._update_addon_info, - CONTAINER_INFO, - all_addons, - True, - ), - ): - container_data: dict[str, Any] = data.setdefault(data_key, {}) + addon_info: dict[str, Any] = data.setdefault(DATA_ADDONS_INFO, {}) - # Clean up cache - for slug in container_data.keys() - wanted_addons: - del container_data[slug] + # Clean up cache + for slug in addon_info.keys() - all_addons: + del addon_info[slug] - # Update cache from API - container_data.update( - dict( - await asyncio.gather( - *[ - update_func(slug) - for slug in wanted_addons - if (first_update and needs_first_update) - or enabled_key in container_updates[slug] - ] - ) + # Update cache from API + addon_info.update( + dict( + await asyncio.gather( + *[ + self._update_addon_info(slug) + for slug in all_addons + if (first_update) or self._addon_info_subscriptions.get(slug) + ] ) ) - - async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]: - """Update single addon stats.""" - try: - stats = await self.supervisor_client.addons.addon_stats(slug) - except SupervisorError as err: - _LOGGER.warning("Could not fetch stats for %s: %s", slug, err) - return (slug, None) - return (slug, stats.to_dict()) + ) async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]: """Return the info for an addon.""" @@ -521,18 +580,15 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator): return (slug, info_dict) @callback - def async_enable_container_updates( - self, slug: str, entity_id: str, types: set[str] + def async_enable_addon_info_updates( + self, slug: str, entity_id: str ) -> CALLBACK_TYPE: - """Enable updates for an add-on.""" - enabled_updates = self._container_updates[slug] - for key in types: - enabled_updates[key].add(entity_id) + """Enable info updates 
for an add-on.""" + self._addon_info_subscriptions[slug].add(entity_id) @callback def _remove() -> None: - for key in types: - enabled_updates[key].remove(entity_id) + self._addon_info_subscriptions[slug].discard(entity_id) return _remove @@ -591,20 +647,16 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): name=DOMAIN, update_interval=HASSIO_UPDATE_INTERVAL, # We don't want an immediate refresh since we want to avoid - # fetching the container stats right away and avoid hammering - # the Supervisor API on startup + # hammering the Supervisor API on startup request_refresh_debouncer=Debouncer( hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False ), ) self.hassio = hass.data[DATA_COMPONENT] - self.data = {} + self.data: dict[str, Any] = {} self.entry_id = config_entry.entry_id self.dev_reg = dev_reg self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None - self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict( - lambda: defaultdict(set) - ) self.supervisor_client = get_supervisor_client(hass) self.jobs = SupervisorJobs(hass) @@ -624,14 +676,8 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): if self.is_hass_os: new_data[DATA_KEY_OS] = get_os_info(self.hass) - new_data[DATA_KEY_CORE] = { - **(get_core_info(self.hass) or {}), - **get_core_stats(self.hass), - } - new_data[DATA_KEY_SUPERVISOR] = { - **supervisor_info, - **get_supervisor_stats(self.hass), - } + new_data[DATA_KEY_CORE] = get_core_info(self.hass) or {} + new_data[DATA_KEY_SUPERVISOR] = supervisor_info new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {} new_data[DATA_KEY_MOUNTS] = {mount.name: mount for mount in mounts_info.mounts} @@ -686,8 +732,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): async def force_data_refresh(self, first_update: bool) -> None: """Force update of the main component info.""" - container_updates = self._container_updates - data = self.hass.data client = self.supervisor_client @@ -698,10 
+742,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): DATA_OS_INFO: client.os.info(), DATA_STORE: client.store.info(), } - if CONTAINER_STATS in container_updates[CORE_CONTAINER]: - updates[DATA_CORE_STATS] = client.homeassistant.stats() - if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]: - updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats() api_results: list[ResponseData] = await asyncio.gather(*updates.values()) for key, result in zip(updates, api_results, strict=True): @@ -714,22 +754,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator): # Refresh jobs data await self.jobs.refresh_data(first_update) - @callback - def async_enable_container_updates( - self, slug: str, entity_id: str, types: set[str] - ) -> CALLBACK_TYPE: - """Enable updates for an add-on.""" - enabled_updates = self._container_updates[slug] - for key in types: - enabled_updates[key].add(entity_id) - - @callback - def _remove() -> None: - for key in types: - enabled_updates[key].remove(entity_id) - - return _remove - async def _async_refresh( self, log_failures: bool = True, diff --git a/homeassistant/components/hassio/diagnostics.py b/homeassistant/components/hassio/diagnostics.py index 61fbdb72eb1..becc663c87d 100644 --- a/homeassistant/components/hassio/diagnostics.py +++ b/homeassistant/components/hassio/diagnostics.py @@ -11,8 +11,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from .const import ADDONS_COORDINATOR, COORDINATOR -from .coordinator import HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator +from .const import ADDONS_COORDINATOR, COORDINATOR, STATS_COORDINATOR +from .coordinator import ( + HassioAddOnDataUpdateCoordinator, + HassioDataUpdateCoordinator, + HassioStatsDataUpdateCoordinator, +) async def async_get_config_entry_diagnostics( @@ -22,6 +26,7 @@ async def 
async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" coordinator: HassioDataUpdateCoordinator = hass.data[COORDINATOR] addons_coordinator: HassioAddOnDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR] + stats_coordinator: HassioStatsDataUpdateCoordinator = hass.data[STATS_COORDINATOR] device_registry = dr.async_get(hass) entity_registry = er.async_get(hass) @@ -55,5 +60,6 @@ async def async_get_config_entry_diagnostics( return { "coordinator_data": coordinator.data, "addons_coordinator_data": addons_coordinator.data, + "stats_coordinator_data": stats_coordinator.data, "devices": devices, } diff --git a/homeassistant/components/hassio/entity.py b/homeassistant/components/hassio/entity.py index 7b04326e8fc..d7514635367 100644 --- a/homeassistant/components/hassio/entity.py +++ b/homeassistant/components/hassio/entity.py @@ -13,7 +13,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( ATTR_SLUG, CONTAINER_STATS, - CORE_CONTAINER, DATA_KEY_ADDONS, DATA_KEY_CORE, DATA_KEY_HOST, @@ -21,10 +20,64 @@ from .const import ( DATA_KEY_OS, DATA_KEY_SUPERVISOR, DOMAIN, - KEY_TO_UPDATE_TYPES, - SUPERVISOR_CONTAINER, ) -from .coordinator import HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator +from .coordinator import ( + HassioAddOnDataUpdateCoordinator, + HassioDataUpdateCoordinator, + HassioStatsDataUpdateCoordinator, +) + + +class HassioStatsEntity(CoordinatorEntity[HassioStatsDataUpdateCoordinator]): + """Base entity for container stats (CPU, memory).""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: HassioStatsDataUpdateCoordinator, + entity_description: EntityDescription, + *, + container_id: str, + data_key: str, + device_id: str, + unique_id_prefix: str, + ) -> None: + """Initialize base entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._container_id = container_id + self._data_key = data_key + 
self._attr_unique_id = f"{unique_id_prefix}_{entity_description.key}" + self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_id)}) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + if self._data_key == DATA_KEY_ADDONS: + return ( + super().available + and DATA_KEY_ADDONS in self.coordinator.data + and self.entity_description.key + in ( + self.coordinator.data[DATA_KEY_ADDONS].get(self._container_id) or {} + ) + ) + return ( + super().available + and self._data_key in self.coordinator.data + and self.entity_description.key in self.coordinator.data[self._data_key] + ) + + async def async_added_to_hass(self) -> None: + """Subscribe to stats updates.""" + await super().async_added_to_hass() + self.async_on_remove( + self.coordinator.async_enable_container_updates( + self._container_id, self.entity_id, {CONTAINER_STATS} + ) + ) + await self.coordinator.async_request_refresh() class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]): @@ -56,16 +109,13 @@ class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]): ) async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" + """Subscribe to addon info updates.""" await super().async_added_to_hass() - update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key] self.async_on_remove( - self.coordinator.async_enable_container_updates( - self._addon_slug, self.entity_id, update_types + self.coordinator.async_enable_addon_info_updates( + self._addon_slug, self.entity_id ) ) - if CONTAINER_STATS in update_types: - await self.coordinator.async_request_refresh() class HassioOSEntity(CoordinatorEntity[HassioDataUpdateCoordinator]): @@ -146,18 +196,6 @@ class HassioSupervisorEntity(CoordinatorEntity[HassioDataUpdateCoordinator]): in self.coordinator.data[DATA_KEY_SUPERVISOR] ) - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - await super().async_added_to_hass() - update_types = 
KEY_TO_UPDATE_TYPES[self.entity_description.key] - self.async_on_remove( - self.coordinator.async_enable_container_updates( - SUPERVISOR_CONTAINER, self.entity_id, update_types - ) - ) - if CONTAINER_STATS in update_types: - await self.coordinator.async_request_refresh() - class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]): """Base Entity for Core.""" @@ -184,18 +222,6 @@ class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]): and self.entity_description.key in self.coordinator.data[DATA_KEY_CORE] ) - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - await super().async_added_to_hass() - update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key] - self.async_on_remove( - self.coordinator.async_enable_container_updates( - CORE_CONTAINER, self.entity_id, update_types - ) - ) - if CONTAINER_STATS in update_types: - await self.coordinator.async_request_refresh() - class HassioMountEntity(CoordinatorEntity[HassioDataUpdateCoordinator]): """Base Entity for Mount.""" diff --git a/homeassistant/components/hassio/sensor.py b/homeassistant/components/hassio/sensor.py index 5bd3c0df3af..7266e940e26 100644 --- a/homeassistant/components/hassio/sensor.py +++ b/homeassistant/components/hassio/sensor.py @@ -17,21 +17,24 @@ from .const import ( ADDONS_COORDINATOR, ATTR_CPU_PERCENT, ATTR_MEMORY_PERCENT, + ATTR_SLUG, ATTR_VERSION, ATTR_VERSION_LATEST, COORDINATOR, + CORE_CONTAINER, DATA_KEY_ADDONS, DATA_KEY_CORE, DATA_KEY_HOST, DATA_KEY_OS, DATA_KEY_SUPERVISOR, + STATS_COORDINATOR, + SUPERVISOR_CONTAINER, ) from .entity import ( HassioAddonEntity, - HassioCoreEntity, HassioHostEntity, HassioOSEntity, - HassioSupervisorEntity, + HassioStatsEntity, ) COMMON_ENTITY_DESCRIPTIONS = ( @@ -64,10 +67,7 @@ STATS_ENTITY_DESCRIPTIONS = ( ), ) -ADDON_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS + STATS_ENTITY_DESCRIPTIONS -CORE_ENTITY_DESCRIPTIONS = STATS_ENTITY_DESCRIPTIONS OS_ENTITY_DESCRIPTIONS = 
COMMON_ENTITY_DESCRIPTIONS -SUPERVISOR_ENTITY_DESCRIPTIONS = STATS_ENTITY_DESCRIPTIONS HOST_ENTITY_DESCRIPTIONS = ( SensorEntityDescription( @@ -116,36 +116,63 @@ async def async_setup_entry( ) -> None: """Sensor set up for Hass.io config entry.""" addons_coordinator = hass.data[ADDONS_COORDINATOR] + coordinator = hass.data[COORDINATOR] + stats_coordinator = hass.data[STATS_COORDINATOR] - entities: list[ - HassioOSSensor | HassioAddonSensor | CoreSensor | SupervisorSensor | HostSensor - ] = [ + entities: list[SensorEntity] = [] + + # Add-on non-stats sensors (version, version_latest) + entities.extend( HassioAddonSensor( addon=addon, coordinator=addons_coordinator, entity_description=entity_description, ) for addon in addons_coordinator.data[DATA_KEY_ADDONS].values() - for entity_description in ADDON_ENTITY_DESCRIPTIONS - ] - - coordinator = hass.data[COORDINATOR] - entities.extend( - CoreSensor( - coordinator=coordinator, - entity_description=entity_description, - ) - for entity_description in CORE_ENTITY_DESCRIPTIONS + for entity_description in COMMON_ENTITY_DESCRIPTIONS ) + # Add-on stats sensors (cpu_percent, memory_percent) entities.extend( - SupervisorSensor( - coordinator=coordinator, + HassioStatsSensor( + coordinator=stats_coordinator, entity_description=entity_description, + container_id=addon[ATTR_SLUG], + data_key=DATA_KEY_ADDONS, + device_id=addon[ATTR_SLUG], + unique_id_prefix=addon[ATTR_SLUG], ) - for entity_description in SUPERVISOR_ENTITY_DESCRIPTIONS + for addon in addons_coordinator.data[DATA_KEY_ADDONS].values() + for entity_description in STATS_ENTITY_DESCRIPTIONS ) + # Core stats sensors + entities.extend( + HassioStatsSensor( + coordinator=stats_coordinator, + entity_description=entity_description, + container_id=CORE_CONTAINER, + data_key=DATA_KEY_CORE, + device_id="core", + unique_id_prefix="home_assistant_core", + ) + for entity_description in STATS_ENTITY_DESCRIPTIONS + ) + + # Supervisor stats sensors + entities.extend( + 
HassioStatsSensor( + coordinator=stats_coordinator, + entity_description=entity_description, + container_id=SUPERVISOR_CONTAINER, + data_key=DATA_KEY_SUPERVISOR, + device_id="supervisor", + unique_id_prefix="home_assistant_supervisor", + ) + for entity_description in STATS_ENTITY_DESCRIPTIONS + ) + + # Host sensors entities.extend( HostSensor( coordinator=coordinator, @@ -154,6 +181,7 @@ async def async_setup_entry( for entity_description in HOST_ENTITY_DESCRIPTIONS ) + # OS sensors if coordinator.is_hass_os: entities.extend( HassioOSSensor( @@ -177,8 +205,21 @@ class HassioAddonSensor(HassioAddonEntity, SensorEntity): ] +class HassioStatsSensor(HassioStatsEntity, SensorEntity): + """Sensor to track container stats.""" + + @property + def native_value(self) -> str: + """Return native value of entity.""" + if self._data_key == DATA_KEY_ADDONS: + return self.coordinator.data[DATA_KEY_ADDONS][self._container_id][ + self.entity_description.key + ] + return self.coordinator.data[self._data_key][self.entity_description.key] + + class HassioOSSensor(HassioOSEntity, SensorEntity): - """Sensor to track a Hass.io add-on attribute.""" + """Sensor to track a Hass.io OS attribute.""" @property def native_value(self) -> str: @@ -186,24 +227,6 @@ class HassioOSSensor(HassioOSEntity, SensorEntity): return self.coordinator.data[DATA_KEY_OS][self.entity_description.key] -class CoreSensor(HassioCoreEntity, SensorEntity): - """Sensor to track a core attribute.""" - - @property - def native_value(self) -> str: - """Return native value of entity.""" - return self.coordinator.data[DATA_KEY_CORE][self.entity_description.key] - - -class SupervisorSensor(HassioSupervisorEntity, SensorEntity): - """Sensor to track a supervisor attribute.""" - - @property - def native_value(self) -> str: - """Return native value of entity.""" - return self.coordinator.data[DATA_KEY_SUPERVISOR][self.entity_description.key] - - class HostSensor(HassioHostEntity, SensorEntity): """Sensor to track a host 
attribute.""" diff --git a/homeassistant/components/hassio/services.py b/homeassistant/components/hassio/services.py index 4db1338c342..5f4d8554337 100644 --- a/homeassistant/components/hassio/services.py +++ b/homeassistant/components/hassio/services.py @@ -32,7 +32,6 @@ from homeassistant.helpers import ( from homeassistant.util.dt import now from .const import ( - COORDINATOR, ATTR_ADDON, ATTR_ADDONS, ATTR_APP, @@ -45,6 +44,7 @@ from .const import ( ATTR_LOCATION, ATTR_PASSWORD, ATTR_SLUG, + COORDINATOR, DOMAIN, SupervisorEntityModel, ) diff --git a/tests/components/hassio/test_init.py b/tests/components/hassio/test_init.py index d0b97f78315..07761441e7a 100644 --- a/tests/components/hassio/test_init.py +++ b/tests/components/hassio/test_init.py @@ -969,15 +969,15 @@ async def test_coordinator_updates_stats_entities_enabled( # Initial refresh without stats supervisor_client.reload_updates.assert_not_called() - # Refresh with stats once we know which ones are needed + # Stats entities trigger refresh on the stats coordinator, + # which does not call reload_updates async_fire_time_changed( hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) ) await hass.async_block_till_done() - supervisor_client.reload_updates.assert_called_once() + supervisor_client.reload_updates.assert_not_called() - supervisor_client.reload_updates.reset_mock() async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) await hass.async_block_till_done() supervisor_client.reload_updates.assert_not_called()