mirror of
https://github.com/jmagar/unraid-mcp.git
synced 2026-03-02 00:04:45 -08:00
feat: consolidate 26 tools into 10 tools with 90 actions
Refactor the entire tool layer to use the consolidated action pattern (action: Literal[...] with QUERIES/MUTATIONS dicts). This reduces LLM context from ~12k to ~5k tokens while adding ~60 new API capabilities. New tools: unraid_info (19 actions), unraid_array (12), unraid_notifications (9), unraid_users (8), unraid_keys (5). Rewritten: unraid_docker (15), unraid_vm (9), unraid_storage (6), unraid_rclone (4), unraid_health (3). Includes 129 tests across 10 test files, code review fixes for 16 issues (severity ordering, PrefixedID regex, sensitive var redaction, etc.). Removes tools/system.py (replaced by tools/info.py). Version bumped to 0.2.0.
This commit is contained in:
109
tests/test_vm.py
Normal file
109
tests/test_vm.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""Tests for unraid_vm tool."""
|
||||
|
||||
from collections.abc import Iterator
from unittest.mock import AsyncMock, patch

import pytest

from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
@pytest.fixture
def _mock_graphql() -> Iterator[AsyncMock]:
    """Patch the VM tool's GraphQL request helper for the duration of a test.

    Yields:
        The ``AsyncMock`` standing in for ``make_graphql_request`` so tests can
        set ``return_value`` and inspect calls.

    Note:
        The original annotated this yield-fixture as ``-> AsyncMock``; a
        generator fixture's correct annotation is ``Iterator[AsyncMock]``.
        The patch is automatically undone at fixture teardown when the
        generator resumes after the ``yield``.
    """
    with patch("unraid_mcp.tools.virtualization.make_graphql_request", new_callable=AsyncMock) as mock:
        yield mock
|
||||
|
||||
|
||||
def _make_tool():
    """Return the registered ``unraid_vm`` tool callable for direct invocation."""
    module_path = "unraid_mcp.tools.virtualization"
    return make_tool_fn(module_path, "register_vm_tool", "unraid_vm")
|
||||
|
||||
|
||||
class TestVmValidation:
    """Input-validation behaviour of the ``unraid_vm`` tool."""

    async def test_actions_except_list_require_vm_id(self, _mock_graphql: AsyncMock) -> None:
        """Every per-VM action must reject a call that omits ``vm_id``."""
        tool_fn = _make_tool()
        per_vm_actions = ("details", "start", "stop", "pause", "resume", "reboot")
        for name in per_vm_actions:
            with pytest.raises(ToolError, match="vm_id"):
                await tool_fn(action=name)

    async def test_destructive_actions_require_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Destructive actions must demand confirmation even with a valid vm_id."""
        tool_fn = _make_tool()
        for name in ("force_stop", "reset"):
            with pytest.raises(ToolError, match="destructive"):
                await tool_fn(action=name, vm_id="uuid-1")

    async def test_destructive_vm_id_check_before_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Destructive actions without vm_id should fail on confirm first."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await tool_fn(action="force_stop")
|
||||
|
||||
|
||||
class TestVmActions:
    """Happy-path and error-path behaviour of each ``unraid_vm`` action."""

    async def test_list(self, _mock_graphql: AsyncMock) -> None:
        """``list`` returns every domain reported by the GraphQL response."""
        payload = {
            "vms": {
                "domains": [
                    {"id": "vm:1", "name": "Windows 11", "state": "RUNNING", "uuid": "uuid-1"},
                ]
            }
        }
        _mock_graphql.return_value = payload
        result = await _make_tool()(action="list")
        vms = result["vms"]
        assert len(vms) == 1
        assert vms[0]["name"] == "Windows 11"

    async def test_list_empty(self, _mock_graphql: AsyncMock) -> None:
        """An empty domains list comes back as an empty ``vms`` list."""
        _mock_graphql.return_value = {"vms": {"domains": []}}
        result = await _make_tool()(action="list")
        assert result["vms"] == []

    async def test_list_no_vms_key(self, _mock_graphql: AsyncMock) -> None:
        """A response missing the ``vms`` key normalises to an empty list."""
        _mock_graphql.return_value = {}
        result = await _make_tool()(action="list")
        assert result["vms"] == []

    async def test_details_by_uuid(self, _mock_graphql: AsyncMock) -> None:
        """``details`` resolves a VM when ``vm_id`` matches its uuid."""
        domain = {"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}
        _mock_graphql.return_value = {"vms": {"domains": [domain]}}
        result = await _make_tool()(action="details", vm_id="uuid-1")
        assert result["name"] == "Win11"

    async def test_details_by_name(self, _mock_graphql: AsyncMock) -> None:
        """``details`` also resolves a VM when ``vm_id`` matches its name."""
        domain = {"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}
        _mock_graphql.return_value = {"vms": {"domains": [domain]}}
        result = await _make_tool()(action="details", vm_id="Win11")
        assert result["uuid"] == "uuid-1"

    async def test_details_not_found(self, _mock_graphql: AsyncMock) -> None:
        """An unknown ``vm_id`` raises a ToolError mentioning 'not found'."""
        domain = {"id": "vm:1", "name": "Win11", "state": "RUNNING", "uuid": "uuid-1"}
        _mock_graphql.return_value = {"vms": {"domains": [domain]}}
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="not found"):
            await tool_fn(action="details", vm_id="nonexistent")

    async def test_start_vm(self, _mock_graphql: AsyncMock) -> None:
        """A successful ``start`` mutation reports success and echoes the action."""
        _mock_graphql.return_value = {"vm": {"start": True}}
        result = await _make_tool()(action="start", vm_id="uuid-1")
        assert result["success"] is True
        assert result["action"] == "start"

    async def test_force_stop(self, _mock_graphql: AsyncMock) -> None:
        """``force_stop`` with ``confirm=True`` succeeds via the forceStop mutation."""
        _mock_graphql.return_value = {"vm": {"forceStop": True}}
        result = await _make_tool()(action="force_stop", vm_id="uuid-1", confirm=True)
        assert result["success"] is True
        assert result["action"] == "force_stop"

    async def test_mutation_unexpected_response(self, _mock_graphql: AsyncMock) -> None:
        """A mutation payload missing the expected key raises a ToolError."""
        _mock_graphql.return_value = {"vm": {}}
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="Failed to start"):
            await tool_fn(action="start", vm_id="uuid-1")
|
||||
Reference in New Issue
Block a user