feat: consolidate 26 tools into 10 tools with 90 actions

Refactor the entire tool layer to use the consolidated action pattern
(action: Literal[...] with QUERIES/MUTATIONS dicts). This reduces LLM
context from ~12k to ~5k tokens while adding ~60 new API capabilities.

New tools: unraid_info (19 actions), unraid_array (12), unraid_notifications (9),
unraid_users (8), unraid_keys (5). Rewritten: unraid_docker (15), unraid_vm (9),
unraid_storage (6), unraid_rclone (4), unraid_health (3).

Includes 129 tests across 10 test files, code review fixes for 16 issues
(severity ordering, PrefixedID regex, sensitive var redaction, etc.).

Removes tools/system.py (replaced by tools/info.py). Version bumped to 0.2.0.
This commit is contained in:
Jacob Magar
2026-02-08 08:49:47 -05:00
parent 67b775a9bc
commit 523b3edc76
33 changed files with 3538 additions and 1583 deletions

50
tests/conftest.py Normal file
View File

@@ -0,0 +1,50 @@
"""Shared test fixtures and helpers for Unraid MCP server tests."""
from typing import Any
from unittest.mock import AsyncMock, patch
import pytest
from fastmcp import FastMCP
@pytest.fixture
def mock_graphql_request() -> AsyncMock:
    """Fixture that patches make_graphql_request at the core module.

    NOTE: Since each tool file imports make_graphql_request into its own
    namespace, tool-specific tests should patch at the tool module level
    (e.g., "unraid_mcp.tools.info.make_graphql_request") instead of using
    this fixture. This fixture is useful for testing the core client
    or for integration tests that reload modules.
    """
    # Yield (not return) so the patch stays active for the test body and is
    # automatically undone during fixture teardown.
    with patch("unraid_mcp.core.client.make_graphql_request", new_callable=AsyncMock) as mock:
        yield mock
def make_tool_fn(
    module_path: str,
    register_fn_name: str,
    tool_name: str,
) -> Any:
    """Build and return the inner function of a registered FastMCP tool.

    Encapsulates the boilerplate shared by every tool test: import the tool
    module, register its tool on a throwaway FastMCP server, and pull the
    wrapped coroutine back out. Keeping this in one place confines use of
    FastMCP's private `_tool_manager._tools` API to a single spot.

    Args:
        module_path: Dotted import path to the tool module (e.g., "unraid_mcp.tools.info")
        register_fn_name: Name of the registration function (e.g., "register_info_tool")
        tool_name: Name of the registered tool (e.g., "unraid_info")

    Returns:
        The async tool function callable
    """
    from importlib import import_module

    tool_module = import_module(module_path)
    register = getattr(tool_module, register_fn_name)
    throwaway_server = FastMCP("test")
    register(throwaway_server)
    # Reach into FastMCP internals once, here, instead of in every test file.
    return throwaway_server._tool_manager._tools[tool_name].fn

77
tests/test_array.py Normal file
View File

@@ -0,0 +1,77 @@
"""Tests for unraid_array tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Patch make_graphql_request where the array tool module imported it."""
    patcher = patch("unraid_mcp.tools.array.make_graphql_request", new_callable=AsyncMock)
    gql_mock = patcher.start()
    try:
        yield gql_mock
    finally:
        patcher.stop()


def _make_tool():
    """Return the unraid_array tool function via the shared conftest helper."""
    return make_tool_fn("unraid_mcp.tools.array", "register_array_tool", "unraid_array")
class TestArrayValidation:
    """Input-validation failures for unraid_array."""

    async def test_destructive_action_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Destructive actions are rejected unless confirm=True is passed."""
        array_tool = _make_tool()
        destructive_actions = ("start", "stop", "shutdown", "reboot")
        for destructive in destructive_actions:
            with pytest.raises(ToolError, match="destructive"):
                await array_tool(action=destructive)

    async def test_disk_action_requires_disk_id(self, _mock_graphql: AsyncMock) -> None:
        """Per-disk actions fail fast when disk_id is omitted."""
        array_tool = _make_tool()
        for disk_action in ("mount_disk", "unmount_disk", "clear_stats"):
            with pytest.raises(ToolError, match="disk_id"):
                await array_tool(action=disk_action)
class TestArrayActions:
    """Happy-path behavior of unraid_array actions against a mocked GraphQL layer."""

    async def test_start_array(self, _mock_graphql: AsyncMock) -> None:
        """start with confirm=True succeeds and issues exactly one GraphQL call."""
        _mock_graphql.return_value = {"setState": {"state": "STARTED"}}
        tool_fn = _make_tool()
        result = await tool_fn(action="start", confirm=True)
        assert result["success"] is True
        assert result["action"] == "start"
        _mock_graphql.assert_called_once()

    async def test_parity_start_with_correct(self, _mock_graphql: AsyncMock) -> None:
        """parity_start forwards the correct flag as a GraphQL variable."""
        _mock_graphql.return_value = {"parityCheck": {"start": True}}
        tool_fn = _make_tool()
        result = await tool_fn(action="parity_start", correct=True)
        assert result["success"] is True
        # call_args[0] is positional args; [1] is the variables dict.
        call_args = _mock_graphql.call_args
        assert call_args[0][1] == {"correct": True}

    async def test_parity_history(self, _mock_graphql: AsyncMock) -> None:
        """parity_history query returns success."""
        _mock_graphql.return_value = {"array": {"parityCheckStatus": {"progress": 50}}}
        tool_fn = _make_tool()
        result = await tool_fn(action="parity_history")
        assert result["success"] is True

    async def test_mount_disk(self, _mock_graphql: AsyncMock) -> None:
        """mount_disk passes the disk id under the `id` GraphQL variable."""
        _mock_graphql.return_value = {"mountArrayDisk": True}
        tool_fn = _make_tool()
        result = await tool_fn(action="mount_disk", disk_id="disk:1")
        assert result["success"] is True
        call_args = _mock_graphql.call_args
        assert call_args[0][1] == {"id": "disk:1"}

    async def test_shutdown(self, _mock_graphql: AsyncMock) -> None:
        """shutdown with confirm=True succeeds and echoes the action name."""
        _mock_graphql.return_value = {"shutdown": True}
        tool_fn = _make_tool()
        result = await tool_fn(action="shutdown", confirm=True)
        assert result["success"] is True
        assert result["action"] == "shutdown"

    async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
        """Unexpected client exceptions are wrapped in ToolError, message preserved."""
        _mock_graphql.side_effect = RuntimeError("disk error")
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="disk error"):
            await tool_fn(action="parity_history")

178
tests/test_docker.py Normal file
View File

@@ -0,0 +1,178 @@
"""Tests for unraid_docker tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
from unraid_mcp.tools.docker import find_container_by_identifier, get_available_container_names
# --- Unit tests for helpers ---
class TestFindContainerByIdentifier:
    """Unit tests for resolving a user-supplied identifier to a container record."""

    def test_by_exact_id(self) -> None:
        plex = {"id": "abc123", "names": ["plex"]}
        assert find_container_by_identifier("abc123", [plex]) == plex

    def test_by_exact_name(self) -> None:
        plex = {"id": "abc123", "names": ["plex"]}
        assert find_container_by_identifier("plex", [plex]) == plex

    def test_fuzzy_match(self) -> None:
        media_server = {"id": "abc123", "names": ["plex-media-server"]}
        assert find_container_by_identifier("plex", [media_server]) == media_server

    def test_not_found(self) -> None:
        assert find_container_by_identifier("sonarr", [{"id": "abc123", "names": ["plex"]}]) is None

    def test_empty_list(self) -> None:
        assert find_container_by_identifier("plex", []) is None
class TestGetAvailableContainerNames:
    """Unit tests for flattening container records into a name list."""

    def test_extracts_names(self) -> None:
        records = [
            {"names": ["plex"]},
            {"names": ["sonarr", "sonarr-v3"]},
        ]
        extracted = get_available_container_names(records)
        for expected in ("plex", "sonarr", "sonarr-v3"):
            assert expected in extracted

    def test_empty(self) -> None:
        assert get_available_container_names([]) == []
# --- Integration tests ---
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Patch make_graphql_request where the docker tool module imported it."""
    patcher = patch("unraid_mcp.tools.docker.make_graphql_request", new_callable=AsyncMock)
    gql_mock = patcher.start()
    try:
        yield gql_mock
    finally:
        patcher.stop()


def _make_tool():
    """Return the unraid_docker tool function via the shared conftest helper."""
    return make_tool_fn("unraid_mcp.tools.docker", "register_docker_tool", "unraid_docker")
class TestDockerValidation:
    """Argument validation for unraid_docker."""

    async def test_remove_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """remove is destructive and must be blocked without confirm=True."""
        docker_tool = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await docker_tool(action="remove", container_id="abc123")

    async def test_container_actions_require_id(self, _mock_graphql: AsyncMock) -> None:
        """Every per-container action fails fast when container_id is missing."""
        docker_tool = _make_tool()
        per_container_actions = ("start", "stop", "details", "logs", "pause", "unpause")
        for per_container in per_container_actions:
            with pytest.raises(ToolError, match="container_id"):
                await docker_tool(action=per_container)

    async def test_network_details_requires_id(self, _mock_graphql: AsyncMock) -> None:
        """network_details fails fast when network_id is missing."""
        docker_tool = _make_tool()
        with pytest.raises(ToolError, match="network_id"):
            await docker_tool(action="network_details")
class TestDockerActions:
    """Happy-path behavior of unraid_docker actions against a mocked GraphQL layer.

    Several tests use side_effect lists: the tool first issues a query to
    resolve a container name to its full ID, then issues the mutation, so the
    mock must return responses in that exact order.
    """

    async def test_list(self, _mock_graphql: AsyncMock) -> None:
        """list returns the containers array from the docker query."""
        _mock_graphql.return_value = {
            "docker": {"containers": [{"id": "c1", "names": ["plex"], "state": "running"}]}
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="list")
        assert len(result["containers"]) == 1

    async def test_start_container(self, _mock_graphql: AsyncMock) -> None:
        # First call resolves ID, second performs start
        # (64 hex chars + ":local" mimics a real PrefixedID container id).
        _mock_graphql.side_effect = [
            {"docker": {"containers": [{"id": "abc123def456" * 4 + "abcd1234abcd1234:local", "names": ["plex"]}]}},
            {"docker": {"start": {"id": "abc123def456" * 4 + "abcd1234abcd1234:local", "state": "running"}}},
        ]
        tool_fn = _make_tool()
        result = await tool_fn(action="start", container_id="plex")
        assert result["success"] is True

    async def test_networks(self, _mock_graphql: AsyncMock) -> None:
        """networks returns the dockerNetworks list."""
        _mock_graphql.return_value = {"dockerNetworks": [{"id": "net:1", "name": "bridge"}]}
        tool_fn = _make_tool()
        result = await tool_fn(action="networks")
        assert len(result["networks"]) == 1

    async def test_port_conflicts(self, _mock_graphql: AsyncMock) -> None:
        """port_conflicts returns an empty list when there are no conflicts."""
        _mock_graphql.return_value = {"docker": {"portConflicts": []}}
        tool_fn = _make_tool()
        result = await tool_fn(action="port_conflicts")
        assert result["port_conflicts"] == []

    async def test_check_updates(self, _mock_graphql: AsyncMock) -> None:
        """check_updates surfaces containerUpdateStatuses."""
        _mock_graphql.return_value = {
            "docker": {"containerUpdateStatuses": [{"id": "c1", "name": "plex", "updateAvailable": True}]}
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="check_updates")
        assert len(result["update_statuses"]) == 1

    async def test_idempotent_start(self, _mock_graphql: AsyncMock) -> None:
        # Resolve + idempotent success
        # (the client signals "already in desired state" via idempotent_success).
        _mock_graphql.side_effect = [
            {"docker": {"containers": [{"id": "a" * 64 + ":local", "names": ["plex"]}]}},
            {"idempotent_success": True, "docker": {}},
        ]
        tool_fn = _make_tool()
        result = await tool_fn(action="start", container_id="plex")
        assert result["idempotent"] is True

    async def test_restart(self, _mock_graphql: AsyncMock) -> None:
        """restart is implemented as resolve + stop + start (three calls)."""
        cid = "a" * 64 + ":local"
        _mock_graphql.side_effect = [
            {"docker": {"containers": [{"id": cid, "names": ["plex"]}]}},
            {"docker": {"stop": {"id": cid, "state": "exited"}}},
            {"docker": {"start": {"id": cid, "state": "running"}}},
        ]
        tool_fn = _make_tool()
        result = await tool_fn(action="restart", container_id="plex")
        assert result["success"] is True
        assert result["action"] == "restart"

    async def test_restart_idempotent_stop(self, _mock_graphql: AsyncMock) -> None:
        """An already-stopped container still restarts; result carries a note."""
        cid = "a" * 64 + ":local"
        _mock_graphql.side_effect = [
            {"docker": {"containers": [{"id": cid, "names": ["plex"]}]}},
            {"idempotent_success": True},
            {"docker": {"start": {"id": cid, "state": "running"}}},
        ]
        tool_fn = _make_tool()
        result = await tool_fn(action="restart", container_id="plex")
        assert result["success"] is True
        assert "note" in result

    async def test_update_all(self, _mock_graphql: AsyncMock) -> None:
        """update_all returns the refreshed container list."""
        _mock_graphql.return_value = {
            "docker": {"updateAllContainers": [{"id": "c1", "state": "running"}]}
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="update_all")
        assert result["success"] is True
        assert len(result["containers"]) == 1

    async def test_remove_with_confirm(self, _mock_graphql: AsyncMock) -> None:
        """remove succeeds when confirm=True (resolve + removeContainer)."""
        cid = "a" * 64 + ":local"
        _mock_graphql.side_effect = [
            {"docker": {"containers": [{"id": cid, "names": ["old-app"]}]}},
            {"docker": {"removeContainer": True}},
        ]
        tool_fn = _make_tool()
        result = await tool_fn(action="remove", container_id="old-app", confirm=True)
        assert result["success"] is True

    async def test_generic_exception_wraps_in_tool_error(self, _mock_graphql: AsyncMock) -> None:
        """Unexpected client exceptions are wrapped in ToolError, message preserved."""
        _mock_graphql.side_effect = RuntimeError("unexpected failure")
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="unexpected failure"):
            await tool_fn(action="list")

126
tests/test_health.py Normal file
View File

@@ -0,0 +1,126 @@
"""Tests for unraid_health tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Patch make_graphql_request where the health tool module imported it."""
    patcher = patch("unraid_mcp.tools.health.make_graphql_request", new_callable=AsyncMock)
    gql_mock = patcher.start()
    try:
        yield gql_mock
    finally:
        patcher.stop()


def _make_tool():
    """Return the unraid_health tool function via the shared conftest helper."""
    return make_tool_fn("unraid_mcp.tools.health", "register_health_tool", "unraid_health")
class TestHealthValidation:
    """Argument validation for unraid_health."""

    async def test_invalid_action(self, _mock_graphql: AsyncMock) -> None:
        """An unknown action string is rejected with an 'Invalid action' error."""
        health_tool = _make_tool()
        with pytest.raises(ToolError, match="Invalid action"):
            await health_tool(action="invalid")
class TestHealthActions:
    """Behavior of unraid_health actions against a mocked GraphQL layer."""

    async def test_test_connection(self, _mock_graphql: AsyncMock) -> None:
        """test_connection reports connected status, online flag, and latency."""
        _mock_graphql.return_value = {"online": True}
        tool_fn = _make_tool()
        result = await tool_fn(action="test_connection")
        assert result["status"] == "connected"
        assert result["online"] is True
        assert "latency_ms" in result

    async def test_check_healthy(self, _mock_graphql: AsyncMock) -> None:
        """A fully populated payload with no alerts yields status 'healthy'."""
        _mock_graphql.return_value = {
            "info": {
                "machineId": "abc123",
                "time": "2026-02-08T12:00:00Z",
                "versions": {"unraid": "7.2.0"},
                "os": {"uptime": 86400},
            },
            "array": {"state": "STARTED"},
            "notifications": {
                "overview": {"unread": {"alert": 0, "warning": 0, "total": 3}}
            },
            "docker": {
                "containers": [{"id": "c1", "state": "running", "status": "Up 2 days"}]
            },
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        assert result["status"] == "healthy"
        assert "api_latency_ms" in result

    async def test_check_warning_on_alerts(self, _mock_graphql: AsyncMock) -> None:
        """Unread alerts escalate status to 'warning' and appear in issues."""
        _mock_graphql.return_value = {
            "info": {"machineId": "abc", "versions": {"unraid": "7.2"}, "os": {"uptime": 100}},
            "array": {"state": "STARTED"},
            "notifications": {
                "overview": {"unread": {"alert": 3, "warning": 0, "total": 3}}
            },
            "docker": {"containers": []},
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        assert result["status"] == "warning"
        assert any("alert" in i for i in result.get("issues", []))

    async def test_check_no_data(self, _mock_graphql: AsyncMock) -> None:
        """An empty GraphQL payload yields status 'unhealthy'."""
        _mock_graphql.return_value = {}
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        assert result["status"] == "unhealthy"

    async def test_check_api_error(self, _mock_graphql: AsyncMock) -> None:
        """check degrades gracefully to 'unhealthy' when the API call raises."""
        _mock_graphql.side_effect = Exception("Connection refused")
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        assert result["status"] == "unhealthy"
        assert "Connection refused" in result["error"]

    async def test_check_severity_never_downgrades(self, _mock_graphql: AsyncMock) -> None:
        """Degraded from missing info should not be overwritten by warning from alerts."""
        _mock_graphql.return_value = {
            "info": {},
            "array": {"state": "STARTED"},
            "notifications": {
                "overview": {"unread": {"alert": 5, "warning": 0, "total": 5}}
            },
            "docker": {"containers": []},
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        # Missing info escalates to "degraded"; alerts only escalate to "warning"
        # Severity should stay at "degraded" (not downgrade to "warning")
        assert result["status"] == "degraded"

    async def test_diagnose_wraps_exception(self, _mock_graphql: AsyncMock) -> None:
        """When _diagnose_subscriptions raises, tool wraps in ToolError."""
        tool_fn = _make_tool()
        with patch(
            "unraid_mcp.tools.health._diagnose_subscriptions",
            side_effect=RuntimeError("broken"),
        ):
            with pytest.raises(ToolError, match="broken"):
                await tool_fn(action="diagnose")

    async def test_diagnose_import_error_internal(self) -> None:
        """_diagnose_subscriptions catches ImportError and returns error dict."""
        import builtins
        from unraid_mcp.tools.health import _diagnose_subscriptions
        real_import = builtins.__import__

        # Only the subscriptions module is made to fail; all other imports
        # fall through to the real __import__ so the helper itself can run.
        def fail_subscriptions(name, *args, **kwargs):
            if "subscriptions" in name:
                raise ImportError("no module")
            return real_import(name, *args, **kwargs)

        with patch("builtins.__import__", side_effect=fail_subscriptions):
            result = await _diagnose_subscriptions()
        assert "error" in result

159
tests/test_info.py Normal file
View File

@@ -0,0 +1,159 @@
"""Tests for unraid_info tool."""
from unittest.mock import AsyncMock, patch
import pytest
from unraid_mcp.core.exceptions import ToolError
from unraid_mcp.tools.info import (
_analyze_disk_health,
_process_array_status,
_process_system_info,
)
# --- Unit tests for helper functions ---
class TestProcessSystemInfo:
    """Unit tests for the _process_system_info helper."""

    def test_processes_os_info(self) -> None:
        """OS and CPU fields are folded into the summary section."""
        payload = {
            "os": {"distro": "Unraid", "release": "7.2", "platform": "linux", "arch": "x86_64", "hostname": "tower", "uptime": 3600},
            "cpu": {"manufacturer": "AMD", "brand": "Ryzen", "cores": 8, "threads": 16},
        }
        processed = _process_system_info(payload)
        assert "summary" in processed
        assert "details" in processed
        summary = processed["summary"]
        assert summary["hostname"] == "tower"
        assert "AMD" in summary["cpu"]

    def test_handles_missing_fields(self) -> None:
        """An empty payload still produces a summary with a memory placeholder."""
        processed = _process_system_info({})
        expected_summary = {"memory_summary": "Memory information not available."}
        assert processed["summary"] == expected_summary

    def test_processes_memory_layout(self) -> None:
        """Each memory DIMM entry becomes one item in memory_layout_details."""
        dimm = {"bank": "0", "type": "DDR4", "clockSpeed": 3200, "manufacturer": "G.Skill", "partNum": "XYZ"}
        processed = _process_system_info({"memory": {"layout": [dimm]}})
        assert len(processed["summary"]["memory_layout_details"]) == 1
class TestAnalyzeDiskHealth:
    """Unit tests for _analyze_disk_health bucket counting."""

    def test_counts_healthy_disks(self) -> None:
        counts = _analyze_disk_health([{"status": "DISK_OK"}, {"status": "DISK_OK"}])
        assert counts["healthy"] == 2

    def test_counts_failed_disks(self) -> None:
        counts = _analyze_disk_health([{"status": "DISK_DSBL"}, {"status": "DISK_INVALID"}])
        assert counts["failed"] == 2

    def test_counts_warning_disks(self) -> None:
        # A disk can be OK but still carry a warning (e.g. temperature).
        counts = _analyze_disk_health([{"status": "DISK_OK", "warning": 45}])
        assert counts["warning"] == 1

    def test_counts_missing_disks(self) -> None:
        counts = _analyze_disk_health([{"status": "DISK_NP"}])
        assert counts["missing"] == 1

    def test_empty_list(self) -> None:
        counts = _analyze_disk_health([])
        assert counts["healthy"] == 0
class TestProcessArrayStatus:
    """Unit tests for the _process_array_status helper."""

    def test_basic_array(self) -> None:
        """A started array with all disks OK is summarized as HEALTHY."""
        payload = {
            "state": "STARTED",
            "capacity": {"kilobytes": {"free": "1048576", "used": "524288", "total": "1572864"}},
            "parities": [{"status": "DISK_OK"}],
            "disks": [{"status": "DISK_OK"}],
            "caches": [],
        }
        summary = _process_array_status(payload)["summary"]
        assert summary["state"] == "STARTED"
        assert summary["overall_health"] == "HEALTHY"

    def test_degraded_array(self) -> None:
        """A missing (DISK_NP) data disk degrades overall health."""
        payload = {
            "state": "STARTED",
            "parities": [],
            "disks": [{"status": "DISK_NP"}],
            "caches": [],
        }
        summary = _process_array_status(payload)["summary"]
        assert summary["overall_health"] == "DEGRADED"
# --- Integration tests for the tool function ---
class TestUnraidInfoTool:
    """Integration tests for the unraid_info tool function.

    Review fix: the original repeated the FastMCP registration boilerplate
    (create a server, import/register the tool, reach into
    ``_tool_manager._tools``) inside every test. That is exactly the pattern
    ``conftest.make_tool_fn`` centralizes — and what every other tool test
    file in this commit uses — so this class now goes through the shared
    helper instead.
    """

    @pytest.fixture
    def _mock_graphql(self) -> AsyncMock:
        """Patch make_graphql_request where the info tool module imported it."""
        with patch("unraid_mcp.tools.info.make_graphql_request", new_callable=AsyncMock) as mock:
            yield mock

    @staticmethod
    def _make_tool():
        """Return the unraid_info tool function via the shared conftest helper."""
        # Local import: conftest is on pytest's rootdir path at collection time.
        from conftest import make_tool_fn

        return make_tool_fn("unraid_mcp.tools.info", "register_info_tool", "unraid_info")

    @pytest.mark.asyncio
    async def test_overview_action(self, _mock_graphql: AsyncMock) -> None:
        """overview returns a processed summary and performs one GraphQL query."""
        _mock_graphql.return_value = {
            "info": {
                "os": {"distro": "Unraid", "release": "7.2", "platform": "linux", "arch": "x86_64", "hostname": "test"},
                "cpu": {"manufacturer": "Intel", "brand": "i7", "cores": 4, "threads": 8},
            }
        }
        tool_fn = self._make_tool()
        result = await tool_fn(action="overview")
        assert "summary" in result
        _mock_graphql.assert_called_once()

    @pytest.mark.asyncio
    async def test_ups_device_requires_device_id(self, _mock_graphql: AsyncMock) -> None:
        """ups_device fails fast when device_id is omitted."""
        tool_fn = self._make_tool()
        with pytest.raises(ToolError, match="device_id is required"):
            await tool_fn(action="ups_device")

    @pytest.mark.asyncio
    async def test_network_action(self, _mock_graphql: AsyncMock) -> None:
        """network returns the raw network payload."""
        _mock_graphql.return_value = {"network": {"id": "net:1", "accessUrls": []}}
        tool_fn = self._make_tool()
        result = await tool_fn(action="network")
        assert result["id"] == "net:1"

    @pytest.mark.asyncio
    async def test_connect_action(self, _mock_graphql: AsyncMock) -> None:
        """connect returns the Unraid Connect status payload."""
        _mock_graphql.return_value = {
            "connect": {"status": "connected", "sandbox": False, "flashGuid": "abc123"}
        }
        tool_fn = self._make_tool()
        result = await tool_fn(action="connect")
        assert result["status"] == "connected"

    @pytest.mark.asyncio
    async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
        """Unexpected client exceptions are wrapped in ToolError, message preserved."""
        _mock_graphql.side_effect = RuntimeError("unexpected")
        tool_fn = self._make_tool()
        with pytest.raises(ToolError, match="unexpected"):
            await tool_fn(action="online")

90
tests/test_keys.py Normal file
View File

@@ -0,0 +1,90 @@
"""Tests for unraid_keys tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Patch make_graphql_request where the keys tool module imported it."""
    patcher = patch("unraid_mcp.tools.keys.make_graphql_request", new_callable=AsyncMock)
    gql_mock = patcher.start()
    try:
        yield gql_mock
    finally:
        patcher.stop()


def _make_tool():
    """Return the unraid_keys tool function via the shared conftest helper."""
    return make_tool_fn("unraid_mcp.tools.keys", "register_keys_tool", "unraid_keys")
class TestKeysValidation:
    """Argument validation for unraid_keys."""

    async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """delete is destructive and must be blocked without confirm=True."""
        keys_tool = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await keys_tool(action="delete", key_id="k:1")

    async def test_get_requires_key_id(self, _mock_graphql: AsyncMock) -> None:
        keys_tool = _make_tool()
        with pytest.raises(ToolError, match="key_id"):
            await keys_tool(action="get")

    async def test_create_requires_name(self, _mock_graphql: AsyncMock) -> None:
        keys_tool = _make_tool()
        with pytest.raises(ToolError, match="name"):
            await keys_tool(action="create")

    async def test_update_requires_key_id(self, _mock_graphql: AsyncMock) -> None:
        keys_tool = _make_tool()
        with pytest.raises(ToolError, match="key_id"):
            await keys_tool(action="update")

    async def test_delete_requires_key_id(self, _mock_graphql: AsyncMock) -> None:
        """Even with confirm=True, delete still needs a key_id."""
        keys_tool = _make_tool()
        with pytest.raises(ToolError, match="key_id"):
            await keys_tool(action="delete", confirm=True)
class TestKeysActions:
    """Happy-path behavior of unraid_keys actions against a mocked GraphQL layer."""

    async def test_list(self, _mock_graphql: AsyncMock) -> None:
        """list returns the apiKeys array."""
        _mock_graphql.return_value = {
            "apiKeys": [{"id": "k:1", "name": "mcp-key", "roles": ["admin"]}]
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="list")
        assert len(result["keys"]) == 1

    async def test_get(self, _mock_graphql: AsyncMock) -> None:
        """get returns a single key payload by id."""
        _mock_graphql.return_value = {"apiKey": {"id": "k:1", "name": "mcp-key", "roles": ["admin"]}}
        tool_fn = _make_tool()
        result = await tool_fn(action="get", key_id="k:1")
        assert result["name"] == "mcp-key"

    async def test_create(self, _mock_graphql: AsyncMock) -> None:
        """create succeeds and surfaces the new key (including its secret)."""
        _mock_graphql.return_value = {
            "createApiKey": {"id": "k:new", "name": "new-key", "key": "secret123", "roles": []}
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="create", name="new-key")
        assert result["success"] is True
        assert result["key"]["name"] == "new-key"

    async def test_create_with_roles(self, _mock_graphql: AsyncMock) -> None:
        """create accepts an optional roles list."""
        _mock_graphql.return_value = {
            "createApiKey": {"id": "k:new", "name": "admin-key", "key": "secret", "roles": ["admin"]}
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="create", name="admin-key", roles=["admin"])
        assert result["success"] is True

    async def test_update(self, _mock_graphql: AsyncMock) -> None:
        """update renames a key and reports success."""
        _mock_graphql.return_value = {"updateApiKey": {"id": "k:1", "name": "renamed", "roles": []}}
        tool_fn = _make_tool()
        result = await tool_fn(action="update", key_id="k:1", name="renamed")
        assert result["success"] is True

    async def test_delete(self, _mock_graphql: AsyncMock) -> None:
        """delete succeeds when confirm=True."""
        _mock_graphql.return_value = {"deleteApiKeys": True}
        tool_fn = _make_tool()
        result = await tool_fn(action="delete", key_id="k:1", confirm=True)
        assert result["success"] is True

145
tests/test_notifications.py Normal file
View File

@@ -0,0 +1,145 @@
"""Tests for unraid_notifications tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Patch make_graphql_request where the notifications tool module imported it."""
    patcher = patch("unraid_mcp.tools.notifications.make_graphql_request", new_callable=AsyncMock)
    gql_mock = patcher.start()
    try:
        yield gql_mock
    finally:
        patcher.stop()


def _make_tool():
    """Return the unraid_notifications tool function via the shared conftest helper."""
    module = "unraid_mcp.tools.notifications"
    return make_tool_fn(module, "register_notifications_tool", "unraid_notifications")
class TestNotificationsValidation:
    """Argument validation for unraid_notifications."""

    async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """delete is destructive and must be blocked without confirm=True."""
        notifications_tool = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await notifications_tool(action="delete", notification_id="n:1", notification_type="UNREAD")

    async def test_delete_archived_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """delete_archived is destructive and must be blocked without confirm=True."""
        notifications_tool = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await notifications_tool(action="delete_archived")

    async def test_create_requires_fields(self, _mock_graphql: AsyncMock) -> None:
        """create fails fast when the required title field is missing."""
        notifications_tool = _make_tool()
        with pytest.raises(ToolError, match="requires title"):
            await notifications_tool(action="create")

    async def test_archive_requires_id(self, _mock_graphql: AsyncMock) -> None:
        notifications_tool = _make_tool()
        with pytest.raises(ToolError, match="notification_id"):
            await notifications_tool(action="archive")

    async def test_delete_requires_id_and_type(self, _mock_graphql: AsyncMock) -> None:
        """Even with confirm=True, delete still needs an id (and type)."""
        notifications_tool = _make_tool()
        with pytest.raises(ToolError, match="requires notification_id"):
            await notifications_tool(action="delete", confirm=True)
class TestNotificationsActions:
    """Happy-path behavior of unraid_notifications actions against a mocked GraphQL layer."""

    async def test_overview(self, _mock_graphql: AsyncMock) -> None:
        """overview returns unread/archive counters from the overview query."""
        _mock_graphql.return_value = {
            "notifications": {
                "overview": {
                    "unread": {"info": 5, "warning": 2, "alert": 0, "total": 7},
                    "archive": {"info": 10, "warning": 1, "alert": 0, "total": 11},
                }
            }
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="overview")
        assert result["unread"]["total"] == 7

    async def test_list(self, _mock_graphql: AsyncMock) -> None:
        """list returns the notifications array."""
        _mock_graphql.return_value = {
            "notifications": {
                "list": [{"id": "n:1", "title": "Test", "importance": "INFO"}]
            }
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="list")
        assert len(result["notifications"]) == 1

    async def test_warnings(self, _mock_graphql: AsyncMock) -> None:
        """warnings surfaces the warningsAndAlerts list."""
        _mock_graphql.return_value = {
            "notifications": {"warningsAndAlerts": [{"id": "n:1", "importance": "WARNING"}]}
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="warnings")
        assert len(result["warnings"]) == 1

    async def test_create(self, _mock_graphql: AsyncMock) -> None:
        """create succeeds with title/subject/description/importance supplied."""
        _mock_graphql.return_value = {
            "notifications": {"createNotification": {"id": "n:new", "title": "Test", "importance": "INFO"}}
        }
        tool_fn = _make_tool()
        result = await tool_fn(
            action="create",
            title="Test",
            subject="Test Subject",
            description="Test Desc",
            importance="info",
        )
        assert result["success"] is True

    async def test_archive_notification(self, _mock_graphql: AsyncMock) -> None:
        """archive succeeds for a single notification id."""
        _mock_graphql.return_value = {"notifications": {"archiveNotification": True}}
        tool_fn = _make_tool()
        result = await tool_fn(action="archive", notification_id="n:1")
        assert result["success"] is True

    async def test_delete_with_confirm(self, _mock_graphql: AsyncMock) -> None:
        """delete succeeds when id, type, and confirm=True are all provided."""
        _mock_graphql.return_value = {"notifications": {"deleteNotification": True}}
        tool_fn = _make_tool()
        result = await tool_fn(
            action="delete",
            notification_id="n:1",
            notification_type="unread",
            confirm=True,
        )
        assert result["success"] is True

    async def test_archive_all(self, _mock_graphql: AsyncMock) -> None:
        """archive_all succeeds without needing a notification id."""
        _mock_graphql.return_value = {"notifications": {"archiveAll": True}}
        tool_fn = _make_tool()
        result = await tool_fn(action="archive_all")
        assert result["success"] is True

    async def test_unread_notification(self, _mock_graphql: AsyncMock) -> None:
        """unread marks a notification unread and echoes the action name."""
        _mock_graphql.return_value = {"notifications": {"unreadNotification": True}}
        tool_fn = _make_tool()
        result = await tool_fn(action="unread", notification_id="n:1")
        assert result["success"] is True
        assert result["action"] == "unread"

    async def test_list_with_importance_filter(self, _mock_graphql: AsyncMock) -> None:
        """list forwards importance (upper-cased), limit, and offset in the filter variable."""
        _mock_graphql.return_value = {
            "notifications": {
                "list": [{"id": "n:1", "title": "Alert", "importance": "WARNING"}]
            }
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="list", importance="warning", limit=10, offset=5)
        assert len(result["notifications"]) == 1
        # call_args[0] is positional args; [1] is the variables dict.
        call_args = _mock_graphql.call_args
        filter_var = call_args[0][1]["filter"]
        assert filter_var["importance"] == "WARNING"
        assert filter_var["limit"] == 10
        assert filter_var["offset"] == 5

    async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
        """Unexpected client exceptions are wrapped in ToolError, message preserved."""
        _mock_graphql.side_effect = RuntimeError("boom")
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="boom"):
            await tool_fn(action="overview")

102
tests/test_rclone.py Normal file
View File

@@ -0,0 +1,102 @@
"""Tests for unraid_rclone tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Patch make_graphql_request where the rclone tool module imported it."""
    patcher = patch("unraid_mcp.tools.rclone.make_graphql_request", new_callable=AsyncMock)
    gql_mock = patcher.start()
    try:
        yield gql_mock
    finally:
        patcher.stop()


def _make_tool():
    """Return the unraid_rclone tool function via the shared conftest helper."""
    return make_tool_fn("unraid_mcp.tools.rclone", "register_rclone_tool", "unraid_rclone")
class TestRcloneValidation:
    """Argument validation for unraid_rclone."""

    async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """delete_remote is destructive and must be blocked without confirm=True."""
        rclone_tool = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await rclone_tool(action="delete_remote", name="gdrive")

    async def test_create_requires_fields(self, _mock_graphql: AsyncMock) -> None:
        """create_remote fails fast when the required name field is missing."""
        rclone_tool = _make_tool()
        with pytest.raises(ToolError, match="requires name"):
            await rclone_tool(action="create_remote")

    async def test_delete_requires_name(self, _mock_graphql: AsyncMock) -> None:
        """Even with confirm=True, delete_remote still needs a name."""
        rclone_tool = _make_tool()
        with pytest.raises(ToolError, match="name is required"):
            await rclone_tool(action="delete_remote", confirm=True)
class TestRcloneActions:
async def test_list_remotes(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {
"rclone": {"remotes": [{"name": "gdrive", "type": "drive"}]}
}
tool_fn = _make_tool()
result = await tool_fn(action="list_remotes")
assert len(result["remotes"]) == 1
async def test_config_form(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {
"rclone": {"configForm": {"id": "form:1", "dataSchema": {}, "uiSchema": {}}}
}
tool_fn = _make_tool()
result = await tool_fn(action="config_form")
assert result["id"] == "form:1"
async def test_config_form_with_provider(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {
"rclone": {"configForm": {"id": "form:s3", "dataSchema": {}, "uiSchema": {}}}
}
tool_fn = _make_tool()
result = await tool_fn(action="config_form", provider_type="s3")
assert result["id"] == "form:s3"
call_args = _mock_graphql.call_args
assert call_args[0][1] == {"formOptions": {"providerType": "s3"}}
async def test_create_remote(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {
"rclone": {"createRCloneRemote": {"name": "newremote", "type": "s3"}}
}
tool_fn = _make_tool()
result = await tool_fn(
action="create_remote",
name="newremote",
provider_type="s3",
config_data={"bucket": "mybucket"},
)
assert result["success"] is True
async def test_create_remote_with_empty_config(self, _mock_graphql: AsyncMock) -> None:
"""Empty config_data dict should be accepted (not rejected by truthiness)."""
_mock_graphql.return_value = {
"rclone": {"createRCloneRemote": {"name": "ftp-remote", "type": "ftp"}}
}
tool_fn = _make_tool()
result = await tool_fn(
action="create_remote",
name="ftp-remote",
provider_type="ftp",
config_data={},
)
assert result["success"] is True
async def test_delete_remote(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {"rclone": {"deleteRCloneRemote": True}}
tool_fn = _make_tool()
result = await tool_fn(action="delete_remote", name="gdrive", confirm=True)
assert result["success"] is True
async def test_delete_remote_failure(self, _mock_graphql: AsyncMock) -> None:
    """A False deleteRCloneRemote response is surfaced as a ToolError."""
    _mock_graphql.return_value = {"rclone": {"deleteRCloneRemote": False}}
    tool = _make_tool()
    with pytest.raises(ToolError, match="Failed to delete"):
        await tool(action="delete_remote", name="gdrive", confirm=True)

105
tests/test_storage.py Normal file
View File

@@ -0,0 +1,105 @@
"""Tests for unraid_storage tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
from unraid_mcp.tools.storage import format_bytes
# --- Unit tests for helpers ---
class TestFormatBytes:
    """format_bytes renders None and each power-of-1024 magnitude correctly."""

    def test_none(self) -> None:
        assert format_bytes(None) == "N/A"

    def test_bytes(self) -> None:
        assert format_bytes(512) == "512.00 B"

    def test_kilobytes(self) -> None:
        assert format_bytes(2 * 1024) == "2.00 KB"

    def test_megabytes(self) -> None:
        assert format_bytes(1024**2) == "1.00 MB"

    def test_gigabytes(self) -> None:
        assert format_bytes(1024**3) == "1.00 GB"

    def test_terabytes(self) -> None:
        assert format_bytes(1024**4) == "1.00 TB"
# --- Integration tests ---
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Yield an AsyncMock replacing the storage module's GraphQL client call."""
    patch_target = "unraid_mcp.tools.storage.make_graphql_request"
    with patch(patch_target, new_callable=AsyncMock) as graphql_mock:
        yield graphql_mock
def _make_tool():
    """Register unraid_storage on a throwaway FastMCP app and return its function."""
    return make_tool_fn(
        "unraid_mcp.tools.storage", "register_storage_tool", "unraid_storage"
    )
class TestStorageValidation:
    """Parameter validation errors raised before any GraphQL request is made."""

    async def test_disk_details_requires_disk_id(self, _mock_graphql: AsyncMock) -> None:
        tool = _make_tool()
        with pytest.raises(ToolError, match="disk_id"):
            await tool(action="disk_details")

    async def test_logs_requires_log_path(self, _mock_graphql: AsyncMock) -> None:
        tool = _make_tool()
        with pytest.raises(ToolError, match="log_path"):
            await tool(action="logs")
class TestStorageActions:
    """Happy-path behaviour for each unraid_storage action."""

    async def test_shares(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {
            "shares": [{"id": "s:1", "name": "media"}, {"id": "s:2", "name": "backups"}]
        }
        tool = _make_tool()
        listing = await tool(action="shares")
        assert len(listing["shares"]) == 2

    async def test_disks(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {"disks": [{"id": "d:1", "device": "sda"}]}
        tool = _make_tool()
        listing = await tool(action="disks")
        assert len(listing["disks"]) == 1

    async def test_disk_details(self, _mock_graphql: AsyncMock) -> None:
        """disk_details builds a summary with formatted temperature and size."""
        _mock_graphql.return_value = {
            "disk": {
                "id": "d:1",
                "device": "sda",
                "name": "WD",
                "serialNum": "SN1",
                "size": 1073741824,
                "temperature": 35,
            }
        }
        tool = _make_tool()
        details = await tool(action="disk_details", disk_id="d:1")
        assert details["summary"]["temperature"] == "35C"
        assert "1.00 GB" in details["summary"]["size_formatted"]

    async def test_disk_details_not_found(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {"disk": None}
        tool = _make_tool()
        with pytest.raises(ToolError, match="not found"):
            await tool(action="disk_details", disk_id="d:missing")

    async def test_unassigned(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {"unassignedDevices": []}
        tool = _make_tool()
        devices = await tool(action="unassigned")
        assert devices["devices"] == []

    async def test_log_files(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {
            "logFiles": [{"name": "syslog", "path": "/var/log/syslog"}]
        }
        tool = _make_tool()
        listing = await tool(action="log_files")
        assert len(listing["log_files"]) == 1

    async def test_logs(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {
            "logFile": {"path": "/var/log/syslog", "content": "log line", "totalLines": 1}
        }
        tool = _make_tool()
        log_result = await tool(action="logs", log_path="/var/log/syslog")
        assert log_result["content"] == "log line"

100
tests/test_users.py Normal file
View File

@@ -0,0 +1,100 @@
"""Tests for unraid_users tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Yield an AsyncMock replacing the users module's GraphQL client call."""
    patch_target = "unraid_mcp.tools.users.make_graphql_request"
    with patch(patch_target, new_callable=AsyncMock) as graphql_mock:
        yield graphql_mock
def _make_tool():
    """Register unraid_users on a throwaway FastMCP app and return its function."""
    return make_tool_fn(
        "unraid_mcp.tools.users", "register_users_tool", "unraid_users"
    )
class TestUsersValidation:
    """Required-parameter and confirmation checks for unraid_users."""

    async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        tool = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await tool(action="delete", user_id="u:1")

    async def test_get_requires_user_id(self, _mock_graphql: AsyncMock) -> None:
        tool = _make_tool()
        with pytest.raises(ToolError, match="user_id"):
            await tool(action="get")

    async def test_add_requires_name_and_password(self, _mock_graphql: AsyncMock) -> None:
        tool = _make_tool()
        with pytest.raises(ToolError, match="requires name and password"):
            await tool(action="add")

    async def test_delete_requires_user_id(self, _mock_graphql: AsyncMock) -> None:
        tool = _make_tool()
        with pytest.raises(ToolError, match="user_id"):
            await tool(action="delete", confirm=True)
class TestUsersActions:
    """Happy-path coverage for each unraid_users action."""

    async def test_me(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {"me": {"id": "u:1", "name": "root", "role": "ADMIN"}}
        tool = _make_tool()
        current_user = await tool(action="me")
        assert current_user["name"] == "root"

    async def test_list(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {
            "users": [{"id": "u:1", "name": "root"}, {"id": "u:2", "name": "guest"}]
        }
        tool = _make_tool()
        listing = await tool(action="list")
        assert len(listing["users"]) == 2

    async def test_get(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {"user": {"id": "u:1", "name": "root", "role": "ADMIN"}}
        tool = _make_tool()
        fetched = await tool(action="get", user_id="u:1")
        assert fetched["name"] == "root"

    async def test_add(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {
            "addUser": {"id": "u:3", "name": "newuser", "role": "USER"}
        }
        tool = _make_tool()
        outcome = await tool(action="add", name="newuser", password="pass123")
        assert outcome["success"] is True

    async def test_add_with_role(self, _mock_graphql: AsyncMock) -> None:
        """A lowercase role argument ends up upper-cased in the GraphQL input."""
        _mock_graphql.return_value = {
            "addUser": {"id": "u:3", "name": "admin2", "role": "ADMIN"}
        }
        tool = _make_tool()
        outcome = await tool(action="add", name="admin2", password="pass123", role="admin")
        assert outcome["success"] is True
        sent_variables = _mock_graphql.call_args[0][1]
        assert sent_variables["input"]["role"] == "ADMIN"

    async def test_delete(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {"deleteUser": True}
        tool = _make_tool()
        outcome = await tool(action="delete", user_id="u:2", confirm=True)
        assert outcome["success"] is True

    async def test_cloud(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {"cloud": {"status": "connected", "apiKey": "***"}}
        tool = _make_tool()
        cloud_info = await tool(action="cloud")
        assert cloud_info["status"] == "connected"

    async def test_remote_access(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {
            "remoteAccess": {"enabled": True, "url": "https://example.com"}
        }
        tool = _make_tool()
        access = await tool(action="remote_access")
        assert access["enabled"] is True

    async def test_origins(self, _mock_graphql: AsyncMock) -> None:
        _mock_graphql.return_value = {
            "allowedOrigins": ["http://localhost", "https://example.com"]
        }
        tool = _make_tool()
        origins = await tool(action="origins")
        assert len(origins["origins"]) == 2

109
tests/test_vm.py Normal file
View File

@@ -0,0 +1,109 @@
"""Tests for unraid_vm tool."""
from unittest.mock import AsyncMock, patch
import pytest
from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
@pytest.fixture
def _mock_graphql() -> AsyncMock:
    """Yield an AsyncMock replacing the virtualization module's GraphQL call."""
    patch_target = "unraid_mcp.tools.virtualization.make_graphql_request"
    with patch(patch_target, new_callable=AsyncMock) as graphql_mock:
        yield graphql_mock
def _make_tool():
    """Register unraid_vm on a throwaway FastMCP app and return its function."""
    return make_tool_fn(
        "unraid_mcp.tools.virtualization", "register_vm_tool", "unraid_vm"
    )
class TestVmValidation:
    """Input validation for the unraid_vm tool."""

    async def test_actions_except_list_require_vm_id(self, _mock_graphql: AsyncMock) -> None:
        tool = _make_tool()
        non_list_actions = ("details", "start", "stop", "pause", "resume", "reboot")
        for vm_action in non_list_actions:
            with pytest.raises(ToolError, match="vm_id"):
                await tool(action=vm_action)

    async def test_destructive_actions_require_confirm(self, _mock_graphql: AsyncMock) -> None:
        tool = _make_tool()
        for vm_action in ("force_stop", "reset"):
            with pytest.raises(ToolError, match="destructive"):
                await tool(action=vm_action, vm_id="uuid-1")

    async def test_destructive_vm_id_check_before_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Destructive actions without vm_id should fail on confirm first."""
        tool = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await tool(action="force_stop")
class TestVmActions:
async def test_list(self, _mock_graphql: AsyncMock) -> None:
    """list returns every VM domain reported by the GraphQL API."""
    _mock_graphql.return_value = {
        "vms": {
            "domains": [
                {"id": "vm:1", "name": "Windows 11", "state": "RUNNING", "uuid": "uuid-1"},
            ]
        }
    }
    tool = _make_tool()
    listing = await tool(action="list")
    assert len(listing["vms"]) == 1
    assert listing["vms"][0]["name"] == "Windows 11"
async def test_list_empty(self, _mock_graphql: AsyncMock) -> None:
    """An empty domains array yields an empty vms list."""
    _mock_graphql.return_value = {"vms": {"domains": []}}
    tool = _make_tool()
    listing = await tool(action="list")
    assert listing["vms"] == []
async def test_list_no_vms_key(self, _mock_graphql: AsyncMock) -> None:
    """A response missing the vms key still yields an empty vms list."""
    _mock_graphql.return_value = {}
    tool = _make_tool()
    listing = await tool(action="list")
    assert listing["vms"] == []
async def test_details_by_uuid(self, _mock_graphql: AsyncMock) -> None:
    """details can look up a VM by its uuid."""
    _mock_graphql.return_value = {
        "vms": {
            "domains": [
                {"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}
            ]
        }
    }
    tool = _make_tool()
    details = await tool(action="details", vm_id="uuid-1")
    assert details["name"] == "Win11"
async def test_details_by_name(self, _mock_graphql: AsyncMock) -> None:
    """details can look up a VM by its display name."""
    _mock_graphql.return_value = {
        "vms": {
            "domains": [
                {"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}
            ]
        }
    }
    tool = _make_tool()
    details = await tool(action="details", vm_id="Win11")
    assert details["uuid"] == "uuid-1"
async def test_details_not_found(self, _mock_graphql: AsyncMock) -> None:
    """An unmatched vm_id raises a not-found ToolError."""
    _mock_graphql.return_value = {
        "vms": {
            "domains": [
                {"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}
            ]
        }
    }
    tool = _make_tool()
    with pytest.raises(ToolError, match="not found"):
        await tool(action="details", vm_id="nonexistent")
async def test_start_vm(self, _mock_graphql: AsyncMock) -> None:
    """start reports success and echoes the action name."""
    _mock_graphql.return_value = {"vm": {"start": True}}
    tool = _make_tool()
    outcome = await tool(action="start", vm_id="uuid-1")
    assert outcome["success"] is True
    assert outcome["action"] == "start"
async def test_force_stop(self, _mock_graphql: AsyncMock) -> None:
    """force_stop succeeds when confirmed and echoes the action name."""
    _mock_graphql.return_value = {"vm": {"forceStop": True}}
    tool = _make_tool()
    outcome = await tool(action="force_stop", vm_id="uuid-1", confirm=True)
    assert outcome["success"] is True
    assert outcome["action"] == "force_stop"
async def test_mutation_unexpected_response(self, _mock_graphql: AsyncMock) -> None:
    """An empty vm payload from a mutation is reported as a failure."""
    _mock_graphql.return_value = {"vm": {}}
    tool = _make_tool()
    with pytest.raises(ToolError, match="Failed to start"):
        await tool(action="start", vm_id="uuid-1")