mirror of
https://github.com/home-assistant/operating-system.git
synced 2026-04-02 08:32:46 +01:00
When running tests on an image that contains older hassio components (Supervisor or plugins), the autoupdate may interfere with the test run. To avoid this, patch the Supervisor updater config as early as possible and restart Supervisor. For Supervisor tests, we need all components to be updated, so parametrize the supervisor update test to update all plugins too.
244 lines
9.8 KiB
Python
244 lines
9.8 KiB
Python
import logging
|
|
from time import sleep
|
|
|
|
import pytest
|
|
from labgrid.driver import ExecutionError
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
@pytest.fixture(scope="module")
def stash() -> dict:
    """Module-scoped dict for passing values (e.g. the backup slug) between tests."""
    return {}
|
|
|
|
|
|
@pytest.mark.dependency()
@pytest.mark.timeout(360)
def test_start_supervisor(shell, shell_json):
    """Disable Supervisor auto-updates, then wait until Supervisor and Home
    Assistant Core are up: both containers running, the Supervisor API
    answering ping, and the initial Core install job completed.

    The polling loops are deliberately unbounded - the pytest timeout
    marker (360s) bounds the whole test.
    """
    # Disable auto-updates to avoid interference with other tests,
    # do it directly on config level and restart Supervisor via systemd.
    shell.run_check(
        "jq '.auto_update = false' /mnt/data/supervisor/updater.json > /tmp/updater.json"
        " && mv /tmp/updater.json /mnt/data/supervisor/updater.json"
        " && systemctl restart hassos-supervisor.service"
    )

    def check_container_running(container_name: str) -> bool:
        # "|| true" keeps run_check from failing while the container
        # does not exist yet.
        out = shell.run_check(f"docker container inspect -f '{{{{.State.Status}}}}' {container_name} || true")
        return "running" in out

    # Phase 1: wait for both the Core and Supervisor containers to be running.
    while True:
        if check_container_running("homeassistant") and check_container_running("hassio_supervisor"):
            break

        sleep(1)

    # run_check returns a list of output lines; join to get the plain IP.
    supervisor_ip = "\n".join(
        shell.run_check("docker inspect --format='{{.NetworkSettings.Networks.bridge.IPAddress}}' hassio_supervisor")
    )

    # Phase 2: wait until the Supervisor API answers ping on its bridge IP.
    while True:
        try:
            if shell_json(f"curl -sSL http://{supervisor_ip}/supervisor/ping").get("result") == "ok":
                break
        except ExecutionError:
            pass  # avoid failure when the container is restarting

        sleep(1)

    logger.info("Waiting for Home Assistant Core to be installed and started...")
    core_install_started = False
    # Phase 3: watch Supervisor's job list - first wait for the Core install
    # job to show up as not-done, then for it to complete/disappear.
    # NOTE(review): if the install job already finished before this loop runs,
    # it is never observed in progress and the loop exits only via the test
    # timeout - confirm the job is always still visible at this point.
    while True:
        try:
            jobs_info = shell_json("ha jobs info --no-progress --raw-json")
            if jobs_info.get("result") != "ok":
                sleep(5)
                continue
            jobs = jobs_info.get("data", {}).get("jobs", [])
            core_installing = any(
                j.get("name") == "home_assistant_core_install" and not j.get("done")
                for j in jobs
            )
            if core_installing:
                # install is in progress
                if not core_install_started:
                    logger.info("Home Assistant Core install job detected, waiting for completion...")
                    core_install_started = True
            elif core_install_started:
                # started and not installing anymore means finished
                logger.info("Home Assistant Core install/start complete")
                break
        except ExecutionError:
            pass  # avoid failure when the supervisor/CLI is restarting

        sleep(5)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_start_supervisor"])
def test_check_supervisor(shell_json):
    """Sanity-check Supervisor and network info via the ha CLI."""
    checks = (
        ("ha supervisor info --no-progress --raw-json", "supervisor info failed", "Supervisor info: %s"),
        ("ha network info --no-progress --raw-json", "network info failed", "Network info: %s"),
    )
    for command, failure_msg, log_fmt in checks:
        info = shell_json(command)
        assert info.get("result") == "ok", failure_msg
        logger.info(log_fmt, info)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_check_supervisor"])
@pytest.mark.timeout(120)
@pytest.mark.parametrize("component", ["supervisor", "audio", "cli", "dns", "observer", "multicast"])
def test_update_components(shell_json, component):
    """Update one hassio component to its latest version and wait until the
    installed version matches version_latest.

    Skips when the component is already up to date. Tolerates the
    "Another job is running" error from the CLI - the running job performs
    the update and the polling loop below waits for it.
    """
    info = shell_json(f"ha {component} info --no-progress --raw-json")
    # "data" may be missing or null in error replies; don't crash on the
    # .get() chain - fall through to the version_latest assertion instead.
    data = info.get("data") or {}
    version = data.get("version")
    version_latest = data.get("version_latest")
    assert version_latest, f"Missing latest {component} version info"
    if version == version_latest:
        logger.info("%s is already up to date", component)
        pytest.skip(f"{component} is already up to date")
    else:
        result = shell_json(f"ha {component} update --no-progress --raw-json")
        # "message" may be absent/None - default to "" so the substring
        # test can't raise TypeError.
        if result.get("result") == "error" and "Another job is running" in result.get("message", ""):
            pass  # an already-running job performs the update; poll below
        else:
            assert result.get("result") == "ok", f"{component} update failed: {result}"

    # Poll until the installed version matches the latest one; bounded by
    # the pytest timeout marker (120s).
    while True:
        try:
            info = shell_json(f"ha {component} info --no-progress --raw-json")
            data = info.get("data")
            if data and data.get("version") == data.get("version_latest"):
                logger.info(
                    "%s updated from %s to %s: %s",
                    component,
                    version,
                    data.get("version"),
                    info,
                )
                break
        except ExecutionError:
            pass  # avoid failure when the container is restarting

        sleep(1)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_check_supervisor"])
def test_supervisor_is_updated(shell_json):
    """Verify Supervisor reports its installed version equals the latest one."""
    data = shell_json("ha supervisor info --no-progress --raw-json").get("data")
    assert data and data.get("version") == data.get("version_latest")
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_supervisor_is_updated"])
def test_app_install(shell_json):
    """Install and start the Core SSH app, then verify it is running."""
    install_result = shell_json("ha apps install core_ssh --no-progress --raw-json")
    assert install_result.get("result") == "ok", "Core SSH app install failed"

    # confirm the app shows an installed version
    installed_version = shell_json("ha apps info core_ssh --no-progress --raw-json").get("data", {}).get("version")
    assert installed_version is not None, "Core SSH app not installed"

    start_result = shell_json("ha apps start core_ssh --no-progress --raw-json")
    assert start_result.get("result") == "ok", "Core SSH app start failed"

    # confirm the app reached the "started" state
    ssh_info = shell_json("ha apps info core_ssh --no-progress --raw-json")
    assert ssh_info.get("data", {}).get("state") == "started", "Core SSH app not running"
    logger.info("Core SSH app info: %s", ssh_info)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_supervisor_is_updated"])
def test_supervisor_errors(shell_json):
    """Run the Supervisor health check and assert a clean resolution center."""
    health_check = shell_json("ha resolution healthcheck --no-progress --raw-json")
    assert health_check.get("result") == "ok", "Supervisor health check failed"
    logger.info("Supervisor health check result: %s", health_check)

    resolution_info = shell_json("ha resolution info --no-progress --raw-json")
    logger.info("Resolution center info: %s", resolution_info)
    resolution_data = resolution_info.get("data")

    # no unhealthy reasons and no unsupported entries may be reported
    assert len(resolution_data.get("unhealthy")) == 0, "Supervisor is unhealthy"
    assert len(resolution_data.get("unsupported")) == 0, "Unsupported entries found"
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_supervisor_is_updated"])
def test_create_backup(shell_json, stash):
    """Create a full backup and stash its slug for the restore tests."""
    backup_result = shell_json("ha backups new --no-progress --raw-json")
    assert backup_result.get("result") == "ok", f"Backup creation failed: {backup_result}"

    slug = backup_result.get("data", {}).get("slug")
    assert slug is not None
    stash["slug"] = slug
    logger.info("Backup creation result: %s", backup_result)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_app_install"])
def test_app_uninstall(shell_json):
    """Uninstall the Core SSH app installed by test_app_install."""
    uninstall_result = shell_json("ha apps uninstall core_ssh --no-progress --raw-json")
    assert uninstall_result.get("result") == "ok", f"Core SSH app uninstall failed: {uninstall_result}"
    logger.info("Core SSH app uninstall result: %s", uninstall_result)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_supervisor_is_updated"])
@pytest.mark.timeout(120)
def test_restart_supervisor(shell, shell_json):
    """Restart Supervisor via the ha CLI and wait until its API and the CLI
    respond again. Polling is bounded by the pytest timeout marker (120s)."""
    restart_result = shell_json("ha supervisor restart --no-progress --raw-json")
    assert restart_result.get("result") == "ok", f"Supervisor restart failed: {restart_result}"

    # run_check returns a list of output lines; join to get the plain IP
    supervisor_ip = "\n".join(
        shell.run_check("docker inspect --format='{{.NetworkSettings.Networks.bridge.IPAddress}}' hassio_supervisor")
    )

    while True:
        try:
            ping = shell_json(f"curl -sSL http://{supervisor_ip}/supervisor/ping")
            # short-circuit: only query the CLI once the API answers ping
            if ping.get("result") == "ok" and shell_json("ha os info --no-progress --raw-json").get("result") == "ok":
                break
        except ExecutionError:
            pass  # avoid failure when the container is restarting

        sleep(1)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_create_backup"])
def test_restore_backup(shell_json, stash):
    """Restore the Core SSH app from the stashed backup and verify it runs."""
    restore_result = shell_json(f"ha backups restore {stash.get('slug')} --app core_ssh --no-progress --raw-json")
    assert restore_result.get("result") == "ok", f"Backup restore failed: {restore_result}"
    logger.info("Backup restore result: %s", restore_result)

    app_info = shell_json("ha apps info core_ssh --no-progress --raw-json")
    app_data = app_info.get("data", {})
    assert app_data.get("version") is not None, "Core SSH app not installed"
    assert app_data.get("state") == "started", "Core SSH app not running"
    logger.info("Core SSH app info: %s", app_info)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_create_backup"])
def test_restore_ssl_directory(shell_json, stash):
    """Restore only the ssl folder from the stashed backup."""
    restore_result = shell_json(f"ha backups restore {stash.get('slug')} --folders ssl --no-progress --raw-json")
    assert restore_result.get("result") == "ok", f"Backup restore failed: {restore_result}"
    logger.info("Backup restore result: %s", restore_result)
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_start_supervisor"])
def test_no_apparmor_denies(shell):
    """Assert the journal contains no AppArmor DENIED entries raised while
    the Supervisor tests ran."""
    denied_entries = shell.run_check("journalctl -t audit | grep DENIED || true")
    assert not denied_entries, f"AppArmor denies found: {denied_entries}"
|
|
|
|
|
|
@pytest.mark.dependency(depends=["test_start_supervisor"])
def test_kernel_not_tainted(shell):
    """Verify the kernel taint flag is still 0 - kept at the end of the
    suite to maximize the chance of catching issues."""
    taint_output = shell.run_check("cat /proc/sys/kernel/tainted")
    assert "\n".join(taint_output) == "0", f"Kernel tainted: {taint_output}"
|