
Portainer add resource usage of containers (#155113)

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Author: Erwin Douna
Date: 2025-11-03 06:37:04 +01:00 (committed by GitHub)
Parent: af8cd0414b
Commit: 4e48c881aa
12 changed files with 1453 additions and 68 deletions

View File: homeassistant/components/portainer/binary_sensor.py

@@ -4,9 +4,6 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any
from pyportainer.models.docker import DockerContainer
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -18,7 +15,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .coordinator import PortainerCoordinator
from .coordinator import PortainerContainerData, PortainerCoordinator
from .entity import (
PortainerContainerEntity,
PortainerCoordinatorData,
@@ -27,24 +24,31 @@ from .entity import (
@dataclass(frozen=True, kw_only=True)
class PortainerBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Class to hold Portainer binary sensor description."""
class PortainerContainerBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Class to hold Portainer container binary sensor description."""
state_fn: Callable[[Any], bool]
state_fn: Callable[[PortainerContainerData], bool | None]
CONTAINER_SENSORS: tuple[PortainerBinarySensorEntityDescription, ...] = (
PortainerBinarySensorEntityDescription(
@dataclass(frozen=True, kw_only=True)
class PortainerEndpointBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Class to hold Portainer endpoint binary sensor description."""
state_fn: Callable[[PortainerCoordinatorData], bool | None]
CONTAINER_SENSORS: tuple[PortainerContainerBinarySensorEntityDescription, ...] = (
PortainerContainerBinarySensorEntityDescription(
key="status",
translation_key="status",
state_fn=lambda data: data.state == "running",
state_fn=lambda data: data.container.state == "running",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),
)
ENDPOINT_SENSORS: tuple[PortainerBinarySensorEntityDescription, ...] = (
PortainerBinarySensorEntityDescription(
ENDPOINT_SENSORS: tuple[PortainerEndpointBinarySensorEntityDescription, ...] = (
PortainerEndpointBinarySensorEntityDescription(
key="status",
translation_key="status",
state_fn=lambda data: data.endpoint.status == 1, # 1 = Running | 2 = Stopped
@@ -76,7 +80,7 @@ async def async_setup_entry(
)
def _async_add_new_containers(
containers: list[tuple[PortainerCoordinatorData, DockerContainer]],
containers: list[tuple[PortainerCoordinatorData, PortainerContainerData]],
) -> None:
"""Add new container binary sensors."""
async_add_entities(
@@ -113,12 +117,12 @@ async def async_setup_entry(
class PortainerEndpointSensor(PortainerEndpointEntity, BinarySensorEntity):
"""Representation of a Portainer endpoint binary sensor entity."""
entity_description: PortainerBinarySensorEntityDescription
entity_description: PortainerEndpointBinarySensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerBinarySensorEntityDescription,
entity_description: PortainerEndpointBinarySensorEntityDescription,
device_info: PortainerCoordinatorData,
) -> None:
"""Initialize Portainer endpoint binary sensor entity."""
@@ -141,13 +145,13 @@ class PortainerEndpointSensor(PortainerEndpointEntity, BinarySensorEntity):
class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
"""Representation of a Portainer container sensor."""
entity_description: PortainerBinarySensorEntityDescription
entity_description: PortainerContainerBinarySensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerBinarySensorEntityDescription,
device_info: DockerContainer,
entity_description: PortainerContainerBinarySensorEntityDescription,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer container sensor."""
@@ -164,6 +168,4 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
return self.entity_description.state_fn(
self.coordinator.data[self.endpoint_id].containers[self.device_name]
)
return self.entity_description.state_fn(self.container_data)

View File: homeassistant/components/portainer/button.py

@@ -12,7 +12,6 @@ from pyportainer.exceptions import (
PortainerConnectionError,
PortainerTimeoutError,
)
from pyportainer.models.docker import DockerContainer
from homeassistant.components.button import (
ButtonDeviceClass,
@@ -26,7 +25,11 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .const import DOMAIN
from .coordinator import PortainerCoordinator, PortainerCoordinatorData
from .coordinator import (
PortainerContainerData,
PortainerCoordinator,
PortainerCoordinatorData,
)
from .entity import PortainerContainerEntity
@@ -64,7 +67,7 @@ async def async_setup_entry(
coordinator = entry.runtime_data
def _async_add_new_containers(
containers: list[tuple[PortainerCoordinatorData, DockerContainer]],
containers: list[tuple[PortainerCoordinatorData, PortainerContainerData]],
) -> None:
"""Add new container button sensors."""
async_add_entities(
@@ -97,7 +100,7 @@ class PortainerButton(PortainerContainerEntity, ButtonEntity):
self,
coordinator: PortainerCoordinator,
entity_description: PortainerButtonDescription,
device_info: DockerContainer,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer button entity."""

View File: homeassistant/components/portainer/coordinator.py

@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable
from dataclasses import dataclass
from datetime import timedelta
@@ -13,7 +14,7 @@ from pyportainer import (
PortainerConnectionError,
PortainerTimeoutError,
)
from pyportainer.models.docker import DockerContainer
from pyportainer.models.docker import DockerContainer, DockerContainerStats
from pyportainer.models.docker_inspect import DockerInfo, DockerVersion
from pyportainer.models.portainer import Endpoint
@@ -39,11 +40,20 @@ class PortainerCoordinatorData:
id: int
name: str | None
endpoint: Endpoint
containers: dict[str, DockerContainer]
containers: dict[str, PortainerContainerData]
docker_version: DockerVersion
docker_info: DockerInfo
@dataclass(slots=True)
class PortainerContainerData:
"""Container data held by the Portainer coordinator."""
container: DockerContainer
stats: DockerContainerStats
stats_pre: DockerContainerStats | None
class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorData]]):
"""Data Update Coordinator for Portainer."""
@@ -72,7 +82,9 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
Callable[[list[PortainerCoordinatorData]], None]
] = []
self.new_containers_callbacks: list[
Callable[[list[tuple[PortainerCoordinatorData, DockerContainer]]], None]
Callable[
[list[tuple[PortainerCoordinatorData, PortainerContainerData]]], None
]
] = []
async def _async_setup(self) -> None:
@@ -119,8 +131,6 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
else:
_LOGGER.debug("Fetched endpoints: %s", endpoints)
mapped_endpoints: dict[int, PortainerCoordinatorData] = {}
for endpoint in endpoints:
@@ -136,6 +146,47 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
containers = await self.portainer.get_containers(endpoint.id)
docker_version = await self.portainer.docker_version(endpoint.id)
docker_info = await self.portainer.docker_info(endpoint.id)
container_map: dict[str, PortainerContainerData] = {}
container_stats_task = [
(
container,
self.portainer.container_stats(
endpoint_id=endpoint.id,
container_id=container.id,
),
)
for container in containers
]
container_stats_gather = await asyncio.gather(
*[task for _, task in container_stats_task],
)
for (container, _), container_stats in zip(
container_stats_task, container_stats_gather, strict=False
):
container_name = container.names[0].replace("/", " ").strip()
# Store the previous stats if available; they are used to calculate deltas for CPU and network usage.
# On the first refresh this is None, since there is nothing to compare against yet.
# The walrus assignments below double as None checks, which keeps mypy happy.
container_map[container_name] = PortainerContainerData(
container=container,
stats=container_stats,
stats_pre=(
prev_container.stats
if self.data
and (prev_data := self.data.get(endpoint.id)) is not None
and (
prev_container := prev_data.containers.get(
container_name
)
)
is not None
else None
),
)
except PortainerConnectionError as err:
_LOGGER.exception("Connection error")
raise UpdateFailed(
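The hunk above pairs each container with its pending container_stats coroutine, awaits them all concurrently with asyncio.gather, and then zips the results back onto the containers. A minimal, self-contained sketch of that gather-and-zip pattern; fetch_stats and the container IDs below are illustrative stand-ins, not the integration's real API:

import asyncio


async def fetch_stats(container_id: str) -> dict[str, int]:
    """Stand-in for a per-container stats request."""
    await asyncio.sleep(0)  # simulate I/O
    return {"cpu_total": 100, "memory_usage": 2048}


async def main() -> None:
    containers = ["abc123", "def456"]
    # Build (item, coroutine) pairs so each result can be matched back to its container.
    tasks = [(container, fetch_stats(container)) for container in containers]
    results = await asyncio.gather(*(coro for _, coro in tasks))
    for (container, _), stats in zip(tasks, results, strict=True):
        print(container, stats)


asyncio.run(main())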
@@ -155,10 +206,7 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
id=endpoint.id,
name=endpoint.name,
endpoint=endpoint,
containers={
container.names[0].replace("/", " ").strip(): container
for container in containers
},
containers=container_map,
docker_version=docker_version,
docker_info=docker_info,
)
@@ -179,7 +227,7 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
# Surprise, we also handle containers here :)
current_containers = {
(endpoint.id, container.id)
(endpoint.id, container.container.id)
for endpoint in mapped_endpoints.values()
for container in endpoint.containers.values()
}
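The stats_pre assignment above leans on nested walrus expressions, which can be hard to follow. The same lookup written as a plain helper, as a sketch only; it assumes the names imported and defined in coordinator.py above (PortainerCoordinatorData, DockerContainerStats), and the helper name is hypothetical:

def previous_stats(
    data: dict[int, PortainerCoordinatorData] | None,
    endpoint_id: int,
    container_name: str,
) -> DockerContainerStats | None:
    """Return the previous refresh's stats for a container, or None on the first run."""
    if not data:
        return None  # first refresh: nothing to compare against
    prev_endpoint = data.get(endpoint_id)
    if prev_endpoint is None:
        return None  # endpoint is new since the last refresh
    prev_container = prev_endpoint.containers.get(container_name)
    if prev_container is None:
        return None  # container is new since the last refresh
    return prev_container.stats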

View File: homeassistant/components/portainer/diagnostics.py

@@ -30,11 +30,11 @@ def _serialize_coordinator(coordinator: PortainerCoordinator) -> dict[str, Any]:
},
"containers": [
{
"id": container.id,
"names": list(container.names or []),
"image": container.image,
"state": container.state,
"status": container.status,
"id": container.container.id,
"names": list(container.container.names or []),
"image": container.container.image,
"state": container.container.state,
"status": container.container.status,
}
for container in endpoint_data.containers.values()
],

View File: homeassistant/components/portainer/entity.py

@@ -1,6 +1,5 @@
"""Base class for Portainer entities."""
from pyportainer.models.docker import DockerContainer
from yarl import URL
from homeassistant.const import CONF_URL
@@ -8,7 +7,11 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DEFAULT_NAME, DOMAIN
from .coordinator import PortainerCoordinator, PortainerCoordinatorData
from .coordinator import (
PortainerContainerData,
PortainerCoordinator,
PortainerCoordinatorData,
)
class PortainerCoordinatorEntity(CoordinatorEntity[PortainerCoordinator]):
@@ -47,21 +50,22 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
def __init__(
self,
device_info: DockerContainer,
device_info: PortainerContainerData,
coordinator: PortainerCoordinator,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize a Portainer container."""
super().__init__(coordinator)
self._device_info = device_info
self.device_id = self._device_info.id
self.device_id = self._device_info.container.id
self.endpoint_id = via_device.endpoint.id
# Container ID's are ephemeral, so use the container name for the unique ID
# The first one, should always be unique, it's fine if users have aliases
# According to Docker's API docs, the first name is unique
assert self._device_info.names, "Container names list unexpectedly empty"
self.device_name = self._device_info.names[0].replace("/", " ").strip()
names = self._device_info.container.names
assert names, "Container names list unexpectedly empty"
self.device_name = names[0].replace("/", " ").strip()
self._attr_device_info = DeviceInfo(
identifiers={
@@ -79,3 +83,8 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
),
translation_key=None if self.device_name else "unknown_container",
)
@property
def container_data(self) -> PortainerContainerData:
"""Return the coordinator data for this container."""
return self.coordinator.data[self.endpoint_id].containers[self.device_name]
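Docker's API reports container names with a leading slash, and the entity derives its device name and unique ID from the first name rather than from the ephemeral container ID. A short illustration of the normalization applied above; the sample name is made up:

names = ["/homeassistant"]  # sample value, not taken from the repository
device_name = names[0].replace("/", " ").strip()
assert device_name == "homeassistant"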

View File: homeassistant/components/portainer/icons.json

@@ -22,6 +22,9 @@
"cpu_total": {
"default": "mdi:cpu-64-bit"
},
"cpu_usage_total": {
"default": "mdi:cpu-64-bit"
},
"docker_version": {
"default": "mdi:docker"
},
@@ -34,9 +37,18 @@
"kernel_version": {
"default": "mdi:memory"
},
"memory_limit": {
"default": "mdi:memory"
},
"memory_total": {
"default": "mdi:memory"
},
"memory_usage": {
"default": "mdi:memory"
},
"memory_usage_percentage": {
"default": "mdi:memory"
},
"operating_system": {
"default": "mdi:chip"
},

View File: homeassistant/components/portainer/sensor.py

@@ -5,8 +5,6 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from pyportainer.models.docker import DockerContainer
from homeassistant.components.sensor import (
EntityCategory,
SensorDeviceClass,
@@ -15,11 +13,15 @@ from homeassistant.components.sensor import (
SensorStateClass,
StateType,
)
from homeassistant.const import UnitOfInformation
from homeassistant.const import PERCENTAGE, UnitOfInformation
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import PortainerConfigEntry, PortainerCoordinator
from .coordinator import (
PortainerConfigEntry,
PortainerContainerData,
PortainerCoordinator,
)
from .entity import (
PortainerContainerEntity,
PortainerCoordinatorData,
@@ -31,7 +33,7 @@ from .entity import (
class PortainerContainerSensorEntityDescription(SensorEntityDescription):
"""Class to hold Portainer container sensor description."""
value_fn: Callable[[DockerContainer], StateType]
value_fn: Callable[[PortainerContainerData], StateType]
@dataclass(frozen=True, kw_only=True)
@@ -45,7 +47,70 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
PortainerContainerSensorEntityDescription(
key="image",
translation_key="image",
value_fn=lambda data: data.image,
value_fn=lambda data: data.container.image,
),
PortainerContainerSensorEntityDescription(
key="memory_limit",
translation_key="memory_limit",
value_fn=lambda data: data.stats.memory_stats.limit,
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEGABYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
PortainerContainerSensorEntityDescription(
key="memory_usage",
translation_key="memory_usage",
value_fn=lambda data: data.stats.memory_stats.usage,
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEGABYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
PortainerContainerSensorEntityDescription(
key="memory_usage_percentage",
translation_key="memory_usage_percentage",
value_fn=lambda data: (
(data.stats.memory_stats.usage / data.stats.memory_stats.limit) * 100.0
if data.stats.memory_stats.limit > 0 and data.stats.memory_stats.usage > 0
else 0.0
),
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
suggested_display_precision=2,
state_class=SensorStateClass.MEASUREMENT,
),
PortainerContainerSensorEntityDescription(
key="cpu_usage_total",
translation_key="cpu_usage_total",
value_fn=lambda data: (
(total_delta / system_delta) * data.stats.cpu_stats.online_cpus * 100.0
if (prev := data.stats_pre) is not None
and (
system_delta := (
data.stats.cpu_stats.system_cpu_usage
- prev.cpu_stats.system_cpu_usage
)
)
> 0
and (
total_delta := (
data.stats.cpu_stats.cpu_usage.total_usage
- prev.cpu_stats.cpu_usage.total_usage
)
)
>= 0
and data.stats.cpu_stats.online_cpus > 0
else 0.0
),
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
suggested_display_precision=2,
state_class=SensorStateClass.MEASUREMENT,
),
)
ENDPOINT_SENSORS: tuple[PortainerEndpointSensorEntityDescription, ...] = (
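Worked numbers for the two formulas above. The memory percentage uses the values from the container_stats.json test fixture further down (usage 6537216 bytes, limit 67108864 bytes); the CPU counters are made up, because the CPU sensor compares the current refresh against stats_pre from the previous refresh, which a single fixture does not model:

# Memory usage percentage, using the test fixture values:
usage, limit = 6_537_216, 67_108_864
memory_pct = (usage / limit) * 100.0 if limit > 0 and usage > 0 else 0.0
print(round(memory_pct, 2))  # 9.74

# CPU usage, with illustrative counter values for two consecutive refreshes:
total_now, total_prev = 2_000_000_000, 1_500_000_000      # container CPU time, ns
system_now, system_prev = 40_000_000_000, 32_000_000_000  # host CPU time, ns
online_cpus = 4
total_delta = total_now - total_prev     # 500_000_000
system_delta = system_now - system_prev  # 8_000_000_000
cpu_pct = (total_delta / system_delta) * online_cpus * 100.0
print(cpu_pct)  # 25.0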
@@ -174,7 +239,7 @@ async def async_setup_entry(
)
def _async_add_new_containers(
containers: list[tuple[PortainerCoordinatorData, DockerContainer]],
containers: list[tuple[PortainerCoordinatorData, PortainerContainerData]],
) -> None:
"""Add new container sensors."""
async_add_entities(
@@ -186,7 +251,7 @@ async def async_setup_entry(
)
for (endpoint, container) in containers
for entity_description in CONTAINER_SENSORS
if entity_description.value_fn(container)
if entity_description.value_fn(container) is not None
)
coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
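The guard above changed from a plain truthiness check to "is not None" because the new resource sensors can legitimately report 0 or 0.0, which is falsy; under the old check an idle container's CPU or memory entities would never have been created. A small illustration:

value = 0.0                # a valid reading for an idle container
print(bool(value))         # False -> a truthiness check would skip the entity
print(value is not None)   # True  -> the entity is still created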
@@ -217,7 +282,7 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
self,
coordinator: PortainerCoordinator,
entity_description: PortainerContainerSensorEntityDescription,
device_info: DockerContainer,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer container sensor."""
@@ -234,9 +299,7 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(
self.coordinator.data[self.endpoint_id].containers[self.device_name]
)
return self.entity_description.value_fn(self.container_data)
class PortainerEndpointSensor(PortainerEndpointEntity, SensorEntity):

View File: homeassistant/components/portainer/strings.json

@@ -83,6 +83,9 @@
"cpu_total": {
"name": "Total CPU"
},
"cpu_usage_total": {
"name": "CPU usage total"
},
"docker_version": {
"name": "Docker version"
},
@@ -95,9 +98,18 @@
"kernel_version": {
"name": "Kernel version"
},
"memory_limit": {
"name": "Memory limit"
},
"memory_total": {
"name": "Total memory"
},
"memory_usage": {
"name": "Memory usage"
},
"memory_usage_percentage": {
"name": "Memory usage percentage"
},
"operating_system": {
"name": "Operating system"
},

View File: homeassistant/components/portainer/switch.py

@@ -12,7 +12,6 @@ from pyportainer.exceptions import (
PortainerConnectionError,
PortainerTimeoutError,
)
from pyportainer.models.docker import DockerContainer
from homeassistant.components.switch import (
SwitchDeviceClass,
@@ -25,7 +24,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .const import DOMAIN
from .coordinator import PortainerCoordinator
from .coordinator import PortainerContainerData, PortainerCoordinator
from .entity import PortainerContainerEntity, PortainerCoordinatorData
@@ -33,7 +32,7 @@ from .entity import PortainerContainerEntity, PortainerCoordinatorData
class PortainerSwitchEntityDescription(SwitchEntityDescription):
"""Class to hold Portainer switch description."""
is_on_fn: Callable[[DockerContainer], bool | None]
is_on_fn: Callable[[PortainerContainerData], bool | None]
turn_on_fn: Callable[[str, Portainer, int, str], Coroutine[Any, Any, None]]
turn_off_fn: Callable[[str, Portainer, int, str], Coroutine[Any, Any, None]]
@@ -72,7 +71,7 @@ SWITCHES: tuple[PortainerSwitchEntityDescription, ...] = (
key="container",
translation_key="container",
device_class=SwitchDeviceClass.SWITCH,
is_on_fn=lambda data: data.state == "running",
is_on_fn=lambda data: data.container.state == "running",
turn_on_fn=perform_action,
turn_off_fn=perform_action,
),
@@ -88,7 +87,7 @@ async def async_setup_entry(
coordinator = entry.runtime_data
def _async_add_new_containers(
containers: list[tuple[PortainerCoordinatorData, DockerContainer]],
containers: list[tuple[PortainerCoordinatorData, PortainerContainerData]],
) -> None:
"""Add new container switch sensors."""
async_add_entities(
@@ -121,7 +120,7 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
self,
coordinator: PortainerCoordinator,
entity_description: PortainerSwitchEntityDescription,
device_info: DockerContainer,
device_info: PortainerContainerData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer container switch."""
@@ -133,9 +132,7 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
@property
def is_on(self) -> bool | None:
"""Return the state of the device."""
return self.entity_description.is_on_fn(
self.coordinator.data[self.endpoint_id].containers[self.device_name]
)
return self.entity_description.is_on_fn(self.container_data)
async def async_turn_on(self, **kwargs: Any) -> None:
"""Start (turn on) the container."""

View File: tests/components/portainer/conftest.py

@@ -3,7 +3,7 @@
from collections.abc import Generator
from unittest.mock import AsyncMock, patch
from pyportainer.models.docker import DockerContainer
from pyportainer.models.docker import DockerContainer, DockerContainerStats
from pyportainer.models.docker_inspect import DockerInfo, DockerVersion
from pyportainer.models.portainer import Endpoint
import pytest
@@ -60,6 +60,9 @@ def mock_portainer_client() -> Generator[AsyncMock]:
client.docker_version.return_value = DockerVersion.from_dict(
load_json_value_fixture("docker_version.json", DOMAIN)
)
client.container_stats.return_value = DockerContainerStats.from_dict(
load_json_value_fixture("container_stats.json", DOMAIN)
)
client.restart_container = AsyncMock(return_value=None)

View File: tests/components/portainer/fixtures/container_stats.json

@@ -0,0 +1,96 @@
{
"read": "2015-01-08T22:57:31.547920715Z",
"pids_stats": {
"current": 3
},
"networks": {
"eth0": {
"rx_bytes": 5338,
"rx_dropped": 0,
"rx_errors": 0,
"rx_packets": 36,
"tx_bytes": 648,
"tx_dropped": 0,
"tx_errors": 0,
"tx_packets": 8
},
"eth5": {
"rx_bytes": 4641,
"rx_dropped": 0,
"rx_errors": 0,
"rx_packets": 26,
"tx_bytes": 690,
"tx_dropped": 0,
"tx_errors": 0,
"tx_packets": 9
}
},
"memory_stats": {
"stats": {
"total_pgmajfault": 0,
"cache": 0,
"mapped_file": 0,
"total_inactive_file": 0,
"pgpgout": 414,
"rss": 6537216,
"total_mapped_file": 0,
"writeback": 0,
"unevictable": 0,
"pgpgin": 477,
"total_unevictable": 0,
"pgmajfault": 0,
"total_rss": 6537216,
"total_rss_huge": 6291456,
"total_writeback": 0,
"total_inactive_anon": 0,
"rss_huge": 6291456,
"hierarchical_memory_limit": 67108864,
"total_pgfault": 964,
"total_active_file": 0,
"active_anon": 6537216,
"total_active_anon": 6537216,
"total_pgpgout": 414,
"total_cache": 0,
"inactive_anon": 0,
"active_file": 0,
"pgfault": 964,
"inactive_file": 0,
"total_pgpgin": 477
},
"max_usage": 6651904,
"usage": 6537216,
"failcnt": 0,
"limit": 67108864
},
"blkio_stats": {},
"cpu_stats": {
"cpu_usage": {
"percpu_usage": [8646879, 24472255, 36438778, 30657443],
"usage_in_usermode": 50000000,
"total_usage": 100215355,
"usage_in_kernelmode": 30000000
},
"system_cpu_usage": 739306590000000,
"online_cpus": 4,
"throttling_data": {
"periods": 0,
"throttled_periods": 0,
"throttled_time": 0
}
},
"precpu_stats": {
"cpu_usage": {
"percpu_usage": [8646879, 24350896, 36438778, 30657443],
"usage_in_usermode": 50000000,
"total_usage": 100093996,
"usage_in_kernelmode": 30000000
},
"system_cpu_usage": 9492140000000,
"online_cpus": 4,
"throttling_data": {
"periods": 0,
"throttled_periods": 0,
"throttled_time": 0
}
}
}

File diff suppressed because it is too large.