mirror of
https://github.com/jmagar/unraid-mcp.git
synced 2026-03-01 16:04:24 -08:00
feat: harden API safety and expand command docs with full test coverage
This commit is contained in:
0
tests/http/__init__.py
Normal file
0
tests/http/__init__.py
Normal file
1187
tests/http/test_request_construction.py
Normal file
1187
tests/http/test_request_construction.py
Normal file
File diff suppressed because it is too large
Load Diff
0
tests/integration/__init__.py
Normal file
0
tests/integration/__init__.py
Normal file
1010
tests/integration/test_subscriptions.py
Normal file
1010
tests/integration/test_subscriptions.py
Normal file
File diff suppressed because it is too large
Load Diff
0
tests/safety/__init__.py
Normal file
0
tests/safety/__init__.py
Normal file
324
tests/safety/test_destructive_guards.py
Normal file
324
tests/safety/test_destructive_guards.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""Safety audit tests for destructive action confirmation guards.
|
||||
|
||||
Verifies that all destructive operations across every tool require
|
||||
explicit `confirm=True` before execution, and that the DESTRUCTIVE_ACTIONS
|
||||
registries are complete and consistent.
|
||||
"""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
# Import DESTRUCTIVE_ACTIONS sets from every tool module that defines one
|
||||
from unraid_mcp.tools.docker import DESTRUCTIVE_ACTIONS as DOCKER_DESTRUCTIVE
|
||||
from unraid_mcp.tools.docker import MUTATIONS as DOCKER_MUTATIONS
|
||||
from unraid_mcp.tools.keys import DESTRUCTIVE_ACTIONS as KEYS_DESTRUCTIVE
|
||||
from unraid_mcp.tools.keys import MUTATIONS as KEYS_MUTATIONS
|
||||
from unraid_mcp.tools.notifications import DESTRUCTIVE_ACTIONS as NOTIF_DESTRUCTIVE
|
||||
from unraid_mcp.tools.notifications import MUTATIONS as NOTIF_MUTATIONS
|
||||
from unraid_mcp.tools.rclone import DESTRUCTIVE_ACTIONS as RCLONE_DESTRUCTIVE
|
||||
from unraid_mcp.tools.rclone import MUTATIONS as RCLONE_MUTATIONS
|
||||
from unraid_mcp.tools.virtualization import DESTRUCTIVE_ACTIONS as VM_DESTRUCTIVE
|
||||
from unraid_mcp.tools.virtualization import MUTATIONS as VM_MUTATIONS
|
||||
|
||||
# Centralized import for make_tool_fn helper
|
||||
# conftest.py sits in tests/ and is importable without __init__.py
|
||||
from conftest import make_tool_fn
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Known destructive actions registry (ground truth for this audit)
# ---------------------------------------------------------------------------

# Every destructive action in the codebase, keyed by tool.
# Each entry records where the tool lives ("module", "register_fn",
# "tool_name" — all str), the audited action set ("actions" — set[str]), and
# the DESTRUCTIVE_ACTIONS set imported from the tool module ("runtime_set").
# The inner values are heterogeneous (str and set[str]), so the inner mapping
# is annotated as dict[str, object]; the previous dict[str, set[str]]
# annotation was simply incorrect for the string-valued entries.
KNOWN_DESTRUCTIVE: dict[str, dict[str, object]] = {
    "docker": {
        "module": "unraid_mcp.tools.docker",
        "register_fn": "register_docker_tool",
        "tool_name": "unraid_docker",
        "actions": {"remove"},
        "runtime_set": DOCKER_DESTRUCTIVE,
    },
    "vm": {
        "module": "unraid_mcp.tools.virtualization",
        "register_fn": "register_vm_tool",
        "tool_name": "unraid_vm",
        "actions": {"force_stop", "reset"},
        "runtime_set": VM_DESTRUCTIVE,
    },
    "notifications": {
        "module": "unraid_mcp.tools.notifications",
        "register_fn": "register_notifications_tool",
        "tool_name": "unraid_notifications",
        "actions": {"delete", "delete_archived"},
        "runtime_set": NOTIF_DESTRUCTIVE,
    },
    "rclone": {
        "module": "unraid_mcp.tools.rclone",
        "register_fn": "register_rclone_tool",
        "tool_name": "unraid_rclone",
        "actions": {"delete_remote"},
        "runtime_set": RCLONE_DESTRUCTIVE,
    },
    "keys": {
        "module": "unraid_mcp.tools.keys",
        "register_fn": "register_keys_tool",
        "tool_name": "unraid_keys",
        "actions": {"delete"},
        "runtime_set": KEYS_DESTRUCTIVE,
    },
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Registry validation: DESTRUCTIVE_ACTIONS sets match ground truth
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestDestructiveActionRegistries:
    """Verify that DESTRUCTIVE_ACTIONS sets in source code match the audit."""

    # Shared lookup tables: the previous version rebuilt the same tool->set
    # dict literal inside each test method; hoisting removes the duplication
    # and guarantees all tests audit the same registries.
    _MUTATIONS_BY_TOOL = {
        "docker": DOCKER_MUTATIONS,
        "vm": VM_MUTATIONS,
        "notifications": NOTIF_MUTATIONS,
        "rclone": RCLONE_MUTATIONS,
        "keys": KEYS_MUTATIONS,
    }
    _DESTRUCTIVE_BY_TOOL = {
        "docker": DOCKER_DESTRUCTIVE,
        "vm": VM_DESTRUCTIVE,
        "notifications": NOTIF_DESTRUCTIVE,
        "rclone": RCLONE_DESTRUCTIVE,
        "keys": KEYS_DESTRUCTIVE,
    }

    @pytest.mark.parametrize("tool_key", list(KNOWN_DESTRUCTIVE.keys()))
    def test_destructive_set_matches_audit(self, tool_key: str) -> None:
        """Each tool's DESTRUCTIVE_ACTIONS must exactly match the audited set."""
        info = KNOWN_DESTRUCTIVE[tool_key]
        assert info["runtime_set"] == info["actions"], (
            f"{tool_key}: DESTRUCTIVE_ACTIONS is {info['runtime_set']}, "
            f"expected {info['actions']}"
        )

    @pytest.mark.parametrize("tool_key", list(KNOWN_DESTRUCTIVE.keys()))
    def test_destructive_actions_are_valid_mutations(self, tool_key: str) -> None:
        """Every destructive action must correspond to an actual mutation."""
        info = KNOWN_DESTRUCTIVE[tool_key]
        mutations = self._MUTATIONS_BY_TOOL[tool_key]
        for action in info["actions"]:
            assert action in mutations, (
                f"{tool_key}: destructive action '{action}' is not in MUTATIONS"
            )

    def test_no_delete_or_remove_mutations_missing_from_destructive(self) -> None:
        """Any mutation with 'delete' or 'remove' in its name should be destructive."""
        missing: list[str] = []
        for tool_key, mutations in self._MUTATIONS_BY_TOOL.items():
            destructive = self._DESTRUCTIVE_BY_TOOL[tool_key]
            for action_name in mutations:
                if ("delete" in action_name or "remove" in action_name) and action_name not in destructive:
                    missing.append(f"{tool_key}/{action_name}")
        assert not missing, (
            f"Mutations with 'delete'/'remove' not in DESTRUCTIVE_ACTIONS: {missing}"
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Confirmation guard tests: calling without confirm=True raises ToolError
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Build parametrized test cases: (tool_key, action, kwargs_without_confirm)
# Each destructive action needs the minimum required params (minus confirm)
_DESTRUCTIVE_TEST_CASES: list[tuple[str, str, dict[str, str]]] = [
    # Docker
    ("docker", "remove", {"container_id": "abc123"}),
    # VM
    ("vm", "force_stop", {"vm_id": "test-vm-uuid"}),
    ("vm", "reset", {"vm_id": "test-vm-uuid"}),
    # Notifications
    ("notifications", "delete", {"notification_id": "notif-1", "notification_type": "UNREAD"}),
    ("notifications", "delete_archived", {}),
    # RClone
    ("rclone", "delete_remote", {"name": "my-remote"}),
    # Keys
    ("keys", "delete", {"key_id": "key-123"}),
]


# Human-readable pytest ids, e.g. "docker/remove", "vm/force_stop".
_CASE_IDS = [f"{c[0]}/{c[1]}" for c in _DESTRUCTIVE_TEST_CASES]
|
||||
|
||||
|
||||
@pytest.fixture
def _mock_docker_graphql() -> Generator[AsyncMock, None, None]:
    """Yield an AsyncMock standing in for the docker tool's GraphQL transport."""
    patcher = patch("unraid_mcp.tools.docker.make_graphql_request", new_callable=AsyncMock)
    mock = patcher.start()
    try:
        yield mock
    finally:
        patcher.stop()
|
||||
|
||||
|
||||
@pytest.fixture
def _mock_vm_graphql() -> Generator[AsyncMock, None, None]:
    """Yield an AsyncMock standing in for the VM tool's GraphQL transport."""
    patcher = patch("unraid_mcp.tools.virtualization.make_graphql_request", new_callable=AsyncMock)
    mock = patcher.start()
    try:
        yield mock
    finally:
        patcher.stop()
|
||||
|
||||
|
||||
@pytest.fixture
def _mock_notif_graphql() -> Generator[AsyncMock, None, None]:
    """Yield an AsyncMock standing in for the notifications tool's GraphQL transport."""
    patcher = patch("unraid_mcp.tools.notifications.make_graphql_request", new_callable=AsyncMock)
    mock = patcher.start()
    try:
        yield mock
    finally:
        patcher.stop()
|
||||
|
||||
|
||||
@pytest.fixture
def _mock_rclone_graphql() -> Generator[AsyncMock, None, None]:
    """Yield an AsyncMock standing in for the rclone tool's GraphQL transport."""
    patcher = patch("unraid_mcp.tools.rclone.make_graphql_request", new_callable=AsyncMock)
    mock = patcher.start()
    try:
        yield mock
    finally:
        patcher.stop()
|
||||
|
||||
|
||||
@pytest.fixture
def _mock_keys_graphql() -> Generator[AsyncMock, None, None]:
    """Yield an AsyncMock standing in for the keys tool's GraphQL transport."""
    patcher = patch("unraid_mcp.tools.keys.make_graphql_request", new_callable=AsyncMock)
    mock = patcher.start()
    try:
        yield mock
    finally:
        patcher.stop()
|
||||
|
||||
|
||||
# Map tool_key -> (module path, register fn, tool name) for make_tool_fn.
# (Values are 3-tuples; fixture names are not stored here — the fixtures are
# injected separately via pytest parameters.)
_TOOL_REGISTRY: dict[str, tuple[str, str, str]] = {
    "docker": ("unraid_mcp.tools.docker", "register_docker_tool", "unraid_docker"),
    "vm": ("unraid_mcp.tools.virtualization", "register_vm_tool", "unraid_vm"),
    "notifications": ("unraid_mcp.tools.notifications", "register_notifications_tool", "unraid_notifications"),
    "rclone": ("unraid_mcp.tools.rclone", "register_rclone_tool", "unraid_rclone"),
    "keys": ("unraid_mcp.tools.keys", "register_keys_tool", "unraid_keys"),
}
|
||||
|
||||
|
||||
class TestConfirmationGuards:
    """Every destructive action must reject calls without confirm=True."""

    @staticmethod
    def _build_tool(tool_key: str):
        """Look up *tool_key* in _TOOL_REGISTRY and build the tool callable.

        Extracted because all three guard tests repeated the same
        registry-unpack + make_tool_fn pair.
        """
        module_path, register_fn, tool_name = _TOOL_REGISTRY[tool_key]
        return make_tool_fn(module_path, register_fn, tool_name)

    @pytest.mark.parametrize("tool_key,action,kwargs", _DESTRUCTIVE_TEST_CASES, ids=_CASE_IDS)
    async def test_rejects_without_confirm(
        self,
        tool_key: str,
        action: str,
        kwargs: dict,
        _mock_docker_graphql: AsyncMock,
        _mock_vm_graphql: AsyncMock,
        _mock_notif_graphql: AsyncMock,
        _mock_rclone_graphql: AsyncMock,
        _mock_keys_graphql: AsyncMock,
    ) -> None:
        """Calling a destructive action without confirm=True must raise ToolError."""
        tool_fn = self._build_tool(tool_key)

        with pytest.raises(ToolError, match="confirm=True"):
            await tool_fn(action=action, **kwargs)

    @pytest.mark.parametrize("tool_key,action,kwargs", _DESTRUCTIVE_TEST_CASES, ids=_CASE_IDS)
    async def test_rejects_with_confirm_false(
        self,
        tool_key: str,
        action: str,
        kwargs: dict,
        _mock_docker_graphql: AsyncMock,
        _mock_vm_graphql: AsyncMock,
        _mock_notif_graphql: AsyncMock,
        _mock_rclone_graphql: AsyncMock,
        _mock_keys_graphql: AsyncMock,
    ) -> None:
        """Explicitly passing confirm=False must still raise ToolError."""
        tool_fn = self._build_tool(tool_key)

        with pytest.raises(ToolError, match="destructive"):
            await tool_fn(action=action, confirm=False, **kwargs)

    @pytest.mark.parametrize("tool_key,action,kwargs", _DESTRUCTIVE_TEST_CASES, ids=_CASE_IDS)
    async def test_error_message_includes_action_name(
        self,
        tool_key: str,
        action: str,
        kwargs: dict,
        _mock_docker_graphql: AsyncMock,
        _mock_vm_graphql: AsyncMock,
        _mock_notif_graphql: AsyncMock,
        _mock_rclone_graphql: AsyncMock,
        _mock_keys_graphql: AsyncMock,
    ) -> None:
        """The error message should include the action name for clarity."""
        tool_fn = self._build_tool(tool_key)

        # `action` is used as a regex pattern; all registered action names
        # are plain identifiers, so no escaping is needed.
        with pytest.raises(ToolError, match=action):
            await tool_fn(action=action, **kwargs)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Positive tests: destructive actions proceed when confirm=True
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestConfirmAllowsExecution:
    """Destructive actions with confirm=True should reach the GraphQL layer."""

    async def test_docker_remove_with_confirm(self, _mock_docker_graphql: AsyncMock) -> None:
        """remove resolves the container name, then issues the removal mutation."""
        cid = "a" * 64 + ":local"
        lookup_response = {"docker": {"containers": [{"id": cid, "names": ["old-app"]}]}}
        remove_response = {"docker": {"removeContainer": True}}
        _mock_docker_graphql.side_effect = [lookup_response, remove_response]
        docker_tool = make_tool_fn("unraid_mcp.tools.docker", "register_docker_tool", "unraid_docker")
        outcome = await docker_tool(action="remove", container_id="old-app", confirm=True)
        assert outcome["success"] is True

    async def test_vm_force_stop_with_confirm(self, _mock_vm_graphql: AsyncMock) -> None:
        """force_stop executes once confirmed."""
        _mock_vm_graphql.return_value = {"vm": {"forceStop": True}}
        vm_tool = make_tool_fn("unraid_mcp.tools.virtualization", "register_vm_tool", "unraid_vm")
        outcome = await vm_tool(action="force_stop", vm_id="test-uuid", confirm=True)
        assert outcome["success"] is True

    async def test_vm_reset_with_confirm(self, _mock_vm_graphql: AsyncMock) -> None:
        """reset executes once confirmed."""
        _mock_vm_graphql.return_value = {"vm": {"reset": True}}
        vm_tool = make_tool_fn("unraid_mcp.tools.virtualization", "register_vm_tool", "unraid_vm")
        outcome = await vm_tool(action="reset", vm_id="test-uuid", confirm=True)
        assert outcome["success"] is True

    async def test_notifications_delete_with_confirm(self, _mock_notif_graphql: AsyncMock) -> None:
        """delete executes once confirmed."""
        _mock_notif_graphql.return_value = {"notifications": {"deleteNotification": True}}
        notif_tool = make_tool_fn(
            "unraid_mcp.tools.notifications", "register_notifications_tool", "unraid_notifications"
        )
        outcome = await notif_tool(
            action="delete",
            notification_id="notif-1",
            notification_type="UNREAD",
            confirm=True,
        )
        assert outcome["success"] is True

    async def test_notifications_delete_archived_with_confirm(self, _mock_notif_graphql: AsyncMock) -> None:
        """delete_archived executes once confirmed."""
        _mock_notif_graphql.return_value = {"notifications": {"deleteArchivedNotifications": True}}
        notif_tool = make_tool_fn(
            "unraid_mcp.tools.notifications", "register_notifications_tool", "unraid_notifications"
        )
        outcome = await notif_tool(action="delete_archived", confirm=True)
        assert outcome["success"] is True

    async def test_rclone_delete_remote_with_confirm(self, _mock_rclone_graphql: AsyncMock) -> None:
        """delete_remote executes once confirmed."""
        _mock_rclone_graphql.return_value = {"rclone": {"deleteRCloneRemote": True}}
        rclone_tool = make_tool_fn("unraid_mcp.tools.rclone", "register_rclone_tool", "unraid_rclone")
        outcome = await rclone_tool(action="delete_remote", name="my-remote", confirm=True)
        assert outcome["success"] is True

    async def test_keys_delete_with_confirm(self, _mock_keys_graphql: AsyncMock) -> None:
        """delete executes once confirmed."""
        _mock_keys_graphql.return_value = {"deleteApiKeys": True}
        keys_tool = make_tool_fn("unraid_mcp.tools.keys", "register_keys_tool", "unraid_keys")
        outcome = await keys_tool(action="delete", key_id="key-123", confirm=True)
        assert outcome["success"] is True
|
||||
@@ -20,26 +20,33 @@ def _make_tool():
|
||||
|
||||
|
||||
class TestArrayValidation:
|
||||
async def test_destructive_action_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
|
||||
async def test_invalid_action_rejected(self, _mock_graphql: AsyncMock) -> None:
|
||||
tool_fn = _make_tool()
|
||||
for action in ("start", "stop", "shutdown", "reboot"):
|
||||
with pytest.raises(ToolError, match="destructive"):
|
||||
await tool_fn(action=action)
|
||||
with pytest.raises(ToolError, match="Invalid action"):
|
||||
await tool_fn(action="start")
|
||||
|
||||
async def test_disk_action_requires_disk_id(self, _mock_graphql: AsyncMock) -> None:
|
||||
async def test_removed_actions_are_invalid(self, _mock_graphql: AsyncMock) -> None:
|
||||
tool_fn = _make_tool()
|
||||
for action in ("mount_disk", "unmount_disk", "clear_stats"):
|
||||
with pytest.raises(ToolError, match="disk_id"):
|
||||
for action in (
|
||||
"start",
|
||||
"stop",
|
||||
"shutdown",
|
||||
"reboot",
|
||||
"mount_disk",
|
||||
"unmount_disk",
|
||||
"clear_stats",
|
||||
):
|
||||
with pytest.raises(ToolError, match="Invalid action"):
|
||||
await tool_fn(action=action)
|
||||
|
||||
|
||||
class TestArrayActions:
|
||||
async def test_start_array(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"setState": {"state": "STARTED"}}
|
||||
async def test_parity_start(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"parityCheck": {"start": True}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", confirm=True)
|
||||
result = await tool_fn(action="parity_start")
|
||||
assert result["success"] is True
|
||||
assert result["action"] == "start"
|
||||
assert result["action"] == "parity_start"
|
||||
_mock_graphql.assert_called_once()
|
||||
|
||||
async def test_parity_start_with_correct(self, _mock_graphql: AsyncMock) -> None:
|
||||
@@ -56,45 +63,22 @@ class TestArrayActions:
|
||||
result = await tool_fn(action="parity_status")
|
||||
assert result["success"] is True
|
||||
|
||||
async def test_mount_disk(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"mountArrayDisk": True}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="mount_disk", disk_id="disk:1")
|
||||
assert result["success"] is True
|
||||
call_args = _mock_graphql.call_args
|
||||
assert call_args[0][1] == {"id": "disk:1"}
|
||||
|
||||
async def test_shutdown(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"shutdown": True}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="shutdown", confirm=True)
|
||||
assert result["success"] is True
|
||||
assert result["action"] == "shutdown"
|
||||
|
||||
async def test_stop_array(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"setState": {"state": "STOPPED"}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="stop", confirm=True)
|
||||
assert result["success"] is True
|
||||
assert result["action"] == "stop"
|
||||
|
||||
async def test_reboot(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"reboot": True}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="reboot", confirm=True)
|
||||
assert result["success"] is True
|
||||
assert result["action"] == "reboot"
|
||||
|
||||
async def test_parity_pause(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"parityCheck": {"pause": True}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="parity_pause")
|
||||
assert result["success"] is True
|
||||
|
||||
async def test_unmount_disk(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"unmountArrayDisk": True}
|
||||
async def test_parity_resume(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"parityCheck": {"resume": True}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="unmount_disk", disk_id="disk:1")
|
||||
result = await tool_fn(action="parity_resume")
|
||||
assert result["success"] is True
|
||||
|
||||
async def test_parity_cancel(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"parityCheck": {"cancel": True}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="parity_cancel")
|
||||
assert result["success"] is True
|
||||
|
||||
async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
|
||||
@@ -107,63 +91,46 @@ class TestArrayActions:
|
||||
class TestArrayMutationFailures:
|
||||
"""Tests for mutation responses that indicate failure."""
|
||||
|
||||
async def test_start_mutation_returns_false(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mutation returning False in the response field should still succeed (the tool
|
||||
wraps the raw response; it doesn't inspect the inner boolean)."""
|
||||
_mock_graphql.return_value = {"setState": False}
|
||||
async def test_parity_start_mutation_returns_false(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"parityCheck": {"start": False}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", confirm=True)
|
||||
result = await tool_fn(action="parity_start")
|
||||
assert result["success"] is True
|
||||
assert result["data"] == {"setState": False}
|
||||
assert result["data"] == {"parityCheck": {"start": False}}
|
||||
|
||||
async def test_start_mutation_returns_null(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mutation returning null for the response field."""
|
||||
_mock_graphql.return_value = {"setState": None}
|
||||
async def test_parity_start_mutation_returns_null(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"parityCheck": {"start": None}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", confirm=True)
|
||||
result = await tool_fn(action="parity_start")
|
||||
assert result["success"] is True
|
||||
assert result["data"] == {"setState": None}
|
||||
assert result["data"] == {"parityCheck": {"start": None}}
|
||||
|
||||
async def test_start_mutation_returns_empty_object(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mutation returning an empty object for the response field."""
|
||||
_mock_graphql.return_value = {"setState": {}}
|
||||
async def test_parity_start_mutation_returns_empty_object(
|
||||
self, _mock_graphql: AsyncMock
|
||||
) -> None:
|
||||
_mock_graphql.return_value = {"parityCheck": {"start": {}}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", confirm=True)
|
||||
result = await tool_fn(action="parity_start")
|
||||
assert result["success"] is True
|
||||
assert result["data"] == {"setState": {}}
|
||||
|
||||
async def test_mount_disk_mutation_returns_false(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""mountArrayDisk returning False indicates mount failed."""
|
||||
_mock_graphql.return_value = {"mountArrayDisk": False}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="mount_disk", disk_id="disk:1")
|
||||
assert result["success"] is True
|
||||
assert result["data"]["mountArrayDisk"] is False
|
||||
assert result["data"] == {"parityCheck": {"start": {}}}
|
||||
|
||||
async def test_mutation_timeout(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mid-operation timeout should be wrapped in ToolError."""
|
||||
|
||||
_mock_graphql.side_effect = TimeoutError("operation timed out")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="timed out"):
|
||||
await tool_fn(action="shutdown", confirm=True)
|
||||
await tool_fn(action="parity_cancel")
|
||||
|
||||
|
||||
class TestArrayNetworkErrors:
|
||||
"""Tests for network-level failures in array operations."""
|
||||
|
||||
async def test_http_500_server_error(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""HTTP 500 from the API should be wrapped in ToolError."""
|
||||
mock_response = AsyncMock()
|
||||
mock_response.status_code = 500
|
||||
mock_response.text = "Internal Server Error"
|
||||
_mock_graphql.side_effect = ToolError("HTTP error 500: Internal Server Error")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="HTTP error 500"):
|
||||
await tool_fn(action="start", confirm=True)
|
||||
await tool_fn(action="parity_start")
|
||||
|
||||
async def test_connection_refused(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Connection refused should be wrapped in ToolError."""
|
||||
_mock_graphql.side_effect = ToolError("Network connection error: Connection refused")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Network connection error"):
|
||||
|
||||
@@ -368,9 +368,7 @@ class TestGraphQLErrorHandling:
|
||||
async def test_graphql_error_raises_tool_error(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"message": "Field 'bogus' not found"}]
|
||||
}
|
||||
mock_response.json.return_value = {"errors": [{"message": "Field 'bogus' not found"}]}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
@@ -403,9 +401,7 @@ class TestGraphQLErrorHandling:
|
||||
async def test_idempotent_start_returns_success(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"message": "Container already running"}]
|
||||
}
|
||||
mock_response.json.return_value = {"errors": [{"message": "Container already running"}]}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
@@ -421,9 +417,7 @@ class TestGraphQLErrorHandling:
|
||||
async def test_idempotent_stop_returns_success(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"message": "Container not running"}]
|
||||
}
|
||||
mock_response.json.return_value = {"errors": [{"message": "Container not running"}]}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
@@ -440,9 +434,7 @@ class TestGraphQLErrorHandling:
|
||||
"""An error that doesn't match idempotent patterns still raises even with context."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"message": "Permission denied"}]
|
||||
}
|
||||
mock_response.json.return_value = {"errors": [{"message": "Permission denied"}]}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
@@ -93,8 +93,21 @@ class TestDockerActions:
|
||||
async def test_start_container(self, _mock_graphql: AsyncMock) -> None:
|
||||
# First call resolves ID, second performs start
|
||||
_mock_graphql.side_effect = [
|
||||
{"docker": {"containers": [{"id": "abc123def456" * 4 + "abcd1234abcd1234:local", "names": ["plex"]}]}},
|
||||
{"docker": {"start": {"id": "abc123def456" * 4 + "abcd1234abcd1234:local", "state": "running"}}},
|
||||
{
|
||||
"docker": {
|
||||
"containers": [
|
||||
{"id": "abc123def456" * 4 + "abcd1234abcd1234:local", "names": ["plex"]}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"docker": {
|
||||
"start": {
|
||||
"id": "abc123def456" * 4 + "abcd1234abcd1234:local",
|
||||
"state": "running",
|
||||
}
|
||||
}
|
||||
},
|
||||
]
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", container_id="plex")
|
||||
@@ -114,7 +127,9 @@ class TestDockerActions:
|
||||
|
||||
async def test_check_updates(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"docker": {"containerUpdateStatuses": [{"id": "c1", "name": "plex", "updateAvailable": True}]}
|
||||
"docker": {
|
||||
"containerUpdateStatuses": [{"id": "c1", "name": "plex", "updateAvailable": True}]
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="check_updates")
|
||||
@@ -175,7 +190,11 @@ class TestDockerActions:
|
||||
|
||||
async def test_details_found(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"docker": {"containers": [{"id": "c1", "names": ["plex"], "state": "running", "image": "plexinc/pms"}]}
|
||||
"docker": {
|
||||
"containers": [
|
||||
{"id": "c1", "names": ["plex"], "state": "running", "image": "plexinc/pms"}
|
||||
]
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="details", container_id="plex")
|
||||
|
||||
@@ -44,12 +44,8 @@ class TestHealthActions:
|
||||
"os": {"uptime": 86400},
|
||||
},
|
||||
"array": {"state": "STARTED"},
|
||||
"notifications": {
|
||||
"overview": {"unread": {"alert": 0, "warning": 0, "total": 3}}
|
||||
},
|
||||
"docker": {
|
||||
"containers": [{"id": "c1", "state": "running", "status": "Up 2 days"}]
|
||||
},
|
||||
"notifications": {"overview": {"unread": {"alert": 0, "warning": 0, "total": 3}}},
|
||||
"docker": {"containers": [{"id": "c1", "state": "running", "status": "Up 2 days"}]},
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="check")
|
||||
@@ -60,9 +56,7 @@ class TestHealthActions:
|
||||
_mock_graphql.return_value = {
|
||||
"info": {"machineId": "abc", "versions": {"unraid": "7.2"}, "os": {"uptime": 100}},
|
||||
"array": {"state": "STARTED"},
|
||||
"notifications": {
|
||||
"overview": {"unread": {"alert": 3, "warning": 0, "total": 3}}
|
||||
},
|
||||
"notifications": {"overview": {"unread": {"alert": 3, "warning": 0, "total": 3}}},
|
||||
"docker": {"containers": []},
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
@@ -88,9 +82,7 @@ class TestHealthActions:
|
||||
_mock_graphql.return_value = {
|
||||
"info": {},
|
||||
"array": {"state": "STARTED"},
|
||||
"notifications": {
|
||||
"overview": {"unread": {"alert": 5, "warning": 0, "total": 5}}
|
||||
},
|
||||
"notifications": {"overview": {"unread": {"alert": 5, "warning": 0, "total": 5}}},
|
||||
"docker": {"containers": []},
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
@@ -102,10 +94,13 @@ class TestHealthActions:
|
||||
async def test_diagnose_wraps_exception(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""When _diagnose_subscriptions raises, tool wraps in ToolError."""
|
||||
tool_fn = _make_tool()
|
||||
with patch(
|
||||
"unraid_mcp.tools.health._diagnose_subscriptions",
|
||||
side_effect=RuntimeError("broken"),
|
||||
), pytest.raises(ToolError, match="broken"):
|
||||
with (
|
||||
patch(
|
||||
"unraid_mcp.tools.health._diagnose_subscriptions",
|
||||
side_effect=RuntimeError("broken"),
|
||||
),
|
||||
pytest.raises(ToolError, match="broken"),
|
||||
):
|
||||
await tool_fn(action="diagnose")
|
||||
|
||||
async def test_diagnose_success(self, _mock_graphql: AsyncMock) -> None:
|
||||
@@ -131,11 +126,14 @@ class TestHealthActions:
|
||||
|
||||
try:
|
||||
# Replace the modules with objects that raise ImportError on access
|
||||
with patch.dict(sys.modules, {
|
||||
"unraid_mcp.subscriptions": None,
|
||||
"unraid_mcp.subscriptions.manager": None,
|
||||
"unraid_mcp.subscriptions.resources": None,
|
||||
}):
|
||||
with patch.dict(
|
||||
sys.modules,
|
||||
{
|
||||
"unraid_mcp.subscriptions": None,
|
||||
"unraid_mcp.subscriptions.manager": None,
|
||||
"unraid_mcp.subscriptions.resources": None,
|
||||
},
|
||||
):
|
||||
result = await _diagnose_subscriptions()
|
||||
assert "error" in result
|
||||
finally:
|
||||
|
||||
@@ -20,7 +20,14 @@ from unraid_mcp.tools.info import (
|
||||
class TestProcessSystemInfo:
|
||||
def test_processes_os_info(self) -> None:
|
||||
raw = {
|
||||
"os": {"distro": "Unraid", "release": "7.2", "platform": "linux", "arch": "x86_64", "hostname": "tower", "uptime": 3600},
|
||||
"os": {
|
||||
"distro": "Unraid",
|
||||
"release": "7.2",
|
||||
"platform": "linux",
|
||||
"arch": "x86_64",
|
||||
"hostname": "tower",
|
||||
"uptime": 3600,
|
||||
},
|
||||
"cpu": {"manufacturer": "AMD", "brand": "Ryzen", "cores": 8, "threads": 16},
|
||||
}
|
||||
result = _process_system_info(raw)
|
||||
@@ -34,7 +41,19 @@ class TestProcessSystemInfo:
|
||||
assert result["summary"] == {"memory_summary": "Memory information not available."}
|
||||
|
||||
def test_processes_memory_layout(self) -> None:
|
||||
raw = {"memory": {"layout": [{"bank": "0", "type": "DDR4", "clockSpeed": 3200, "manufacturer": "G.Skill", "partNum": "XYZ"}]}}
|
||||
raw = {
|
||||
"memory": {
|
||||
"layout": [
|
||||
{
|
||||
"bank": "0",
|
||||
"type": "DDR4",
|
||||
"clockSpeed": 3200,
|
||||
"manufacturer": "G.Skill",
|
||||
"partNum": "XYZ",
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
result = _process_system_info(raw)
|
||||
assert len(result["summary"]["memory_layout_details"]) == 1
|
||||
|
||||
@@ -130,7 +149,13 @@ class TestUnraidInfoTool:
|
||||
async def test_overview_action(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"info": {
|
||||
"os": {"distro": "Unraid", "release": "7.2", "platform": "linux", "arch": "x86_64", "hostname": "test"},
|
||||
"os": {
|
||||
"distro": "Unraid",
|
||||
"release": "7.2",
|
||||
"platform": "linux",
|
||||
"arch": "x86_64",
|
||||
"hostname": "test",
|
||||
},
|
||||
"cpu": {"manufacturer": "Intel", "brand": "i7", "cores": 4, "threads": 8},
|
||||
}
|
||||
}
|
||||
@@ -165,7 +190,9 @@ class TestUnraidInfoTool:
|
||||
await tool_fn(action="online")
|
||||
|
||||
async def test_metrics(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"metrics": {"cpu": {"used": 25.5}, "memory": {"used": 8192, "total": 32768}}}
|
||||
_mock_graphql.return_value = {
|
||||
"metrics": {"cpu": {"used": 25.5}, "memory": {"used": 8192, "total": 32768}}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="metrics")
|
||||
assert result["cpu"]["used"] == 25.5
|
||||
@@ -178,7 +205,9 @@ class TestUnraidInfoTool:
|
||||
assert result["services"][0]["name"] == "docker"
|
||||
|
||||
async def test_settings(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"settings": {"unified": {"values": {"timezone": "US/Eastern"}}}}
|
||||
_mock_graphql.return_value = {
|
||||
"settings": {"unified": {"values": {"timezone": "US/Eastern"}}}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="settings")
|
||||
assert result["timezone"] == "US/Eastern"
|
||||
@@ -191,20 +220,32 @@ class TestUnraidInfoTool:
|
||||
assert result == {"raw": "raw_string"}
|
||||
|
||||
async def test_servers(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"servers": [{"id": "s:1", "name": "tower", "status": "online"}]}
|
||||
_mock_graphql.return_value = {
|
||||
"servers": [{"id": "s:1", "name": "tower", "status": "online"}]
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="servers")
|
||||
assert len(result["servers"]) == 1
|
||||
assert result["servers"][0]["name"] == "tower"
|
||||
|
||||
async def test_flash(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"flash": {"id": "f:1", "guid": "abc", "product": "SanDisk", "vendor": "SanDisk", "size": 32000000000}}
|
||||
_mock_graphql.return_value = {
|
||||
"flash": {
|
||||
"id": "f:1",
|
||||
"guid": "abc",
|
||||
"product": "SanDisk",
|
||||
"vendor": "SanDisk",
|
||||
"size": 32000000000,
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="flash")
|
||||
assert result["product"] == "SanDisk"
|
||||
|
||||
async def test_ups_devices(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"upsDevices": [{"id": "ups:1", "model": "APC", "status": "online", "charge": 100}]}
|
||||
_mock_graphql.return_value = {
|
||||
"upsDevices": [{"id": "ups:1", "model": "APC", "status": "online", "charge": 100}]
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="ups_devices")
|
||||
assert len(result["ups_devices"]) == 1
|
||||
|
||||
@@ -56,7 +56,9 @@ class TestKeysActions:
|
||||
assert len(result["keys"]) == 1
|
||||
|
||||
async def test_get(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"apiKey": {"id": "k:1", "name": "mcp-key", "roles": ["admin"]}}
|
||||
_mock_graphql.return_value = {
|
||||
"apiKey": {"id": "k:1", "name": "mcp-key", "roles": ["admin"]}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="get", key_id="k:1")
|
||||
assert result["name"] == "mcp-key"
|
||||
@@ -72,7 +74,12 @@ class TestKeysActions:
|
||||
|
||||
async def test_create_with_roles(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"createApiKey": {"id": "k:new", "name": "admin-key", "key": "secret", "roles": ["admin"]}
|
||||
"createApiKey": {
|
||||
"id": "k:new",
|
||||
"name": "admin-key",
|
||||
"key": "secret",
|
||||
"roles": ["admin"],
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="create", name="admin-key", roles=["admin"])
|
||||
|
||||
@@ -11,7 +11,9 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.notifications.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
with patch(
|
||||
"unraid_mcp.tools.notifications.make_graphql_request", new_callable=AsyncMock
|
||||
) as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
@@ -64,9 +66,7 @@ class TestNotificationsActions:
|
||||
|
||||
async def test_list(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"notifications": {
|
||||
"list": [{"id": "n:1", "title": "Test", "importance": "INFO"}]
|
||||
}
|
||||
"notifications": {"list": [{"id": "n:1", "title": "Test", "importance": "INFO"}]}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="list")
|
||||
@@ -82,7 +82,9 @@ class TestNotificationsActions:
|
||||
|
||||
async def test_create(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"notifications": {"createNotification": {"id": "n:new", "title": "Test", "importance": "INFO"}}
|
||||
"notifications": {
|
||||
"createNotification": {"id": "n:new", "title": "Test", "importance": "INFO"}
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(
|
||||
@@ -126,9 +128,7 @@ class TestNotificationsActions:
|
||||
|
||||
async def test_list_with_importance_filter(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"notifications": {
|
||||
"list": [{"id": "n:1", "title": "Alert", "importance": "WARNING"}]
|
||||
}
|
||||
"notifications": {"list": [{"id": "n:1", "title": "Alert", "importance": "WARNING"}]}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="list", importance="warning", limit=10, offset=5)
|
||||
|
||||
@@ -39,9 +39,7 @@ class TestRcloneValidation:
|
||||
|
||||
class TestRcloneActions:
|
||||
async def test_list_remotes(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"rclone": {"remotes": [{"name": "gdrive", "type": "drive"}]}
|
||||
}
|
||||
_mock_graphql.return_value = {"rclone": {"remotes": [{"name": "gdrive", "type": "drive"}]}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="list_remotes")
|
||||
assert len(result["remotes"]) == 1
|
||||
|
||||
@@ -95,7 +95,14 @@ class TestStorageActions:
|
||||
|
||||
async def test_disk_details(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"disk": {"id": "d:1", "device": "sda", "name": "WD", "serialNum": "SN1", "size": 1073741824, "temperature": 35}
|
||||
"disk": {
|
||||
"id": "d:1",
|
||||
"device": "sda",
|
||||
"name": "WD",
|
||||
"serialNum": "SN1",
|
||||
"size": 1073741824,
|
||||
"temperature": 35,
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="disk_details", disk_id="d:1")
|
||||
@@ -121,7 +128,9 @@ class TestStorageActions:
|
||||
assert len(result["log_files"]) == 1
|
||||
|
||||
async def test_logs(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"logFile": {"path": "/var/log/syslog", "content": "log line", "totalLines": 1}}
|
||||
_mock_graphql.return_value = {
|
||||
"logFile": {"path": "/var/log/syslog", "content": "log line", "totalLines": 1}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="logs", log_path="/var/log/syslog")
|
||||
assert result["content"] == "log line"
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
"""Tests for unraid_users tool."""
|
||||
"""Tests for unraid_users tool.
|
||||
|
||||
NOTE: Unraid GraphQL API only supports the me() query.
|
||||
User management operations (list, add, delete, cloud, remote_access, origins) are NOT available in the API.
|
||||
"""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
@@ -20,112 +24,54 @@ def _make_tool():
|
||||
|
||||
|
||||
class TestUsersValidation:
|
||||
async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="destructive"):
|
||||
await tool_fn(action="delete", user_id="u:1")
|
||||
"""Test validation for invalid actions."""
|
||||
|
||||
async def test_get_requires_user_id(self, _mock_graphql: AsyncMock) -> None:
|
||||
async def test_invalid_action_rejected(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Test that non-existent actions are rejected with clear error."""
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="user_id"):
|
||||
await tool_fn(action="get")
|
||||
with pytest.raises(ToolError, match="Invalid action"):
|
||||
await tool_fn(action="list")
|
||||
|
||||
async def test_add_requires_name_and_password(self, _mock_graphql: AsyncMock) -> None:
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="requires name and password"):
|
||||
with pytest.raises(ToolError, match="Invalid action"):
|
||||
await tool_fn(action="add")
|
||||
|
||||
async def test_delete_requires_user_id(self, _mock_graphql: AsyncMock) -> None:
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="user_id"):
|
||||
await tool_fn(action="delete", confirm=True)
|
||||
with pytest.raises(ToolError, match="Invalid action"):
|
||||
await tool_fn(action="delete")
|
||||
|
||||
with pytest.raises(ToolError, match="Invalid action"):
|
||||
await tool_fn(action="cloud")
|
||||
|
||||
|
||||
class TestUsersActions:
|
||||
"""Test the single supported action: me."""
|
||||
|
||||
async def test_me(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"me": {"id": "u:1", "name": "root", "description": "", "roles": ["ADMIN"]}}
|
||||
"""Test querying current authenticated user."""
|
||||
_mock_graphql.return_value = {
|
||||
"me": {"id": "u:1", "name": "root", "description": "", "roles": ["ADMIN"]}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="me")
|
||||
assert result["name"] == "root"
|
||||
assert result["roles"] == ["ADMIN"]
|
||||
_mock_graphql.assert_called_once()
|
||||
|
||||
async def test_list(self, _mock_graphql: AsyncMock) -> None:
|
||||
async def test_me_default_action(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Test that 'me' is the default action."""
|
||||
_mock_graphql.return_value = {
|
||||
"users": [{"id": "u:1", "name": "root"}, {"id": "u:2", "name": "guest"}]
|
||||
"me": {"id": "u:1", "name": "root", "description": "", "roles": ["ADMIN"]}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="list")
|
||||
assert len(result["users"]) == 2
|
||||
|
||||
async def test_get(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"user": {"id": "u:1", "name": "root", "description": "", "roles": ["ADMIN"]}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="get", user_id="u:1")
|
||||
result = await tool_fn()
|
||||
assert result["name"] == "root"
|
||||
|
||||
async def test_add(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"addUser": {"id": "u:3", "name": "newuser", "description": "", "roles": ["USER"]}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="add", name="newuser", password="pass123")
|
||||
assert result["success"] is True
|
||||
|
||||
async def test_add_with_role(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"addUser": {"id": "u:3", "name": "admin2", "description": "", "roles": ["ADMIN"]}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="add", name="admin2", password="pass123", role="admin")
|
||||
assert result["success"] is True
|
||||
call_args = _mock_graphql.call_args
|
||||
assert call_args[0][1]["input"]["role"] == "ADMIN"
|
||||
|
||||
async def test_delete(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"deleteUser": {"id": "u:2", "name": "guest"}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="delete", user_id="u:2", confirm=True)
|
||||
assert result["success"] is True
|
||||
call_args = _mock_graphql.call_args
|
||||
assert call_args[0][1]["input"]["id"] == "u:2"
|
||||
|
||||
async def test_cloud(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"cloud": {"status": "connected", "apiKey": "***"}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="cloud")
|
||||
assert result["status"] == "connected"
|
||||
|
||||
async def test_remote_access(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"remoteAccess": {"enabled": True, "url": "https://example.com"}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="remote_access")
|
||||
assert result["enabled"] is True
|
||||
|
||||
async def test_origins(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"allowedOrigins": ["http://localhost", "https://example.com"]}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="origins")
|
||||
assert len(result["origins"]) == 2
|
||||
|
||||
|
||||
class TestUsersNoneHandling:
|
||||
"""Verify actions return empty dict (not TypeError) when API returns None."""
|
||||
|
||||
async def test_me_returns_none(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Test that me returns empty dict when API returns None."""
|
||||
_mock_graphql.return_value = {"me": None}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="me")
|
||||
assert result == {}
|
||||
|
||||
async def test_get_returns_none(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"user": None}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="get", user_id="u:1")
|
||||
assert result == {}
|
||||
|
||||
async def test_cloud_returns_none(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"cloud": None}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="cloud")
|
||||
assert result == {}
|
||||
|
||||
async def test_remote_access_returns_none(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"remoteAccess": None}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="remote_access")
|
||||
assert result == {}
|
||||
|
||||
@@ -11,7 +11,9 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.virtualization.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
with patch(
|
||||
"unraid_mcp.tools.virtualization.make_graphql_request", new_callable=AsyncMock
|
||||
) as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
@@ -67,7 +69,9 @@ class TestVmActions:
|
||||
|
||||
async def test_details_by_uuid(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"vms": {"domains": [{"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}]}
|
||||
"vms": {
|
||||
"domains": [{"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}]
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="details", vm_id="uuid-1")
|
||||
@@ -75,7 +79,9 @@ class TestVmActions:
|
||||
|
||||
async def test_details_by_name(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"vms": {"domains": [{"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}]}
|
||||
"vms": {
|
||||
"domains": [{"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}]
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="details", vm_id="Win11")
|
||||
@@ -83,7 +89,9 @@ class TestVmActions:
|
||||
|
||||
async def test_details_not_found(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {
|
||||
"vms": {"domains": [{"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}]}
|
||||
"vms": {
|
||||
"domains": [{"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}]
|
||||
}
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="not found"):
|
||||
|
||||
Reference in New Issue
Block a user