Mirror of https://github.com/jmagar/unraid-mcp.git, synced 2026-03-23 12:39:24 -07:00
refactor(tools)!: consolidate 15 individual tools into single unified unraid tool
BREAKING CHANGE: Replaces 15 separate MCP tools (unraid_info, unraid_array,
unraid_storage, unraid_docker, unraid_vm, unraid_notifications, unraid_rclone,
unraid_users, unraid_keys, unraid_health, unraid_settings, unraid_customization,
unraid_plugins, unraid_oidc, unraid_live) with a single `unraid` tool using
action (domain) + subaction (operation) routing.

New interface: unraid(action="system", subaction="overview") replaces
unraid_info(action="overview"). All 15 domains and ~108 subactions preserved.

- Add unraid_mcp/tools/unraid.py (1891 lines, all domains consolidated)
- Remove 15 individual tool files
- Update tools/__init__.py to register the single unified tool
- Update server.py for the new tool registration pattern
- Update subscriptions/manager.py and resources.py for the new tool names
- Update all 25 test files + integration/contract/safety/schema/property tests
- Update mcporter smoke-test script for the new tool interface
- Bump version 0.6.0 → 1.0.0

Co-authored-by: Claude <noreply@anthropic.com>
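For readers skimming the diff below, the mechanics of the consolidation: one MCP tool whose first argument selects the domain and whose second selects the operation within it. The new unraid.py is not part of the excerpt shown on this page, so the following is only a minimal sketch of that two-level routing, with a hypothetical handler, a stand-in ToolError, and error messages and a result shape modeled on the tests in this commit:

    from collections.abc import Awaitable, Callable
    from typing import Any


    class ToolError(Exception):
        """Stand-in for unraid_mcp.core.exceptions.ToolError."""


    async def _parity_status(**_params: Any) -> dict[str, Any]:
        # Hypothetical handler; the real one issues a GraphQL request.
        return {"array": {"parityCheckStatus": {"progress": 0}}}


    # Two-level dispatch: action (domain) -> subaction (operation) -> handler.
    _HANDLERS: dict[str, dict[str, Callable[..., Awaitable[dict[str, Any]]]]] = {
        "array": {"parity_status": _parity_status},
        # ...14 more domains, ~108 subactions in the real tool
    }


    async def unraid(action: str, subaction: str, **params: Any) -> dict[str, Any]:
        domain = _HANDLERS.get(action)
        if domain is None:
            raise ToolError(f"Invalid action: {action}")
        handler = domain.get(subaction)
        if handler is None:
            raise ToolError(f"Invalid subaction: {subaction}")
        try:
            data = await handler(**params)
        except ToolError:
            raise
        except Exception as exc:  # wrap unexpected failures, per the tests below
            raise ToolError(f"Failed to execute {action}/{subaction}: {exc}") from exc
        return {"success": True, "action": action, "subaction": subaction, "data": data}

The sketch mirrors what the tests assert: an unknown domain raises "Invalid action", an unknown operation within a known domain raises "Invalid subaction", and unexpected exceptions are wrapped as "Failed to execute {action}/{subaction}".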
@@ -1,4 +1,4 @@
-"""Tests for unraid_array tool."""
+"""Tests for array subactions of the consolidated unraid tool."""

 from collections.abc import Generator
 from unittest.mock import AsyncMock, patch
@@ -11,36 +11,36 @@ from unraid_mcp.core.exceptions import ToolError

 @pytest.fixture
 def _mock_graphql() -> Generator[AsyncMock, None, None]:
-    with patch("unraid_mcp.tools.array.make_graphql_request", new_callable=AsyncMock) as mock:
+    with patch("unraid_mcp.tools.unraid.make_graphql_request", new_callable=AsyncMock) as mock:
         yield mock


 def _make_tool():
-    return make_tool_fn("unraid_mcp.tools.array", "register_array_tool", "unraid_array")
+    return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")


 class TestArrayValidation:
-    async def test_invalid_action_rejected(self, _mock_graphql: AsyncMock) -> None:
+    async def test_invalid_subaction_rejected(self, _mock_graphql: AsyncMock) -> None:
         tool_fn = _make_tool()
-        with pytest.raises(ToolError, match="Invalid action"):
-            await tool_fn(action="start")
+        with pytest.raises(ToolError, match="Invalid subaction"):
+            await tool_fn(action="array", subaction="start")

     async def test_removed_actions_are_invalid(self, _mock_graphql: AsyncMock) -> None:
         tool_fn = _make_tool()
-        for action in (
+        for subaction in (
             "start",
             "stop",
             "shutdown",
             "reboot",
             "clear_stats",
         ):
-            with pytest.raises(ToolError, match="Invalid action"):
-                await tool_fn(action=action)
+            with pytest.raises(ToolError, match="Invalid subaction"):
+                await tool_fn(action="array", subaction=subaction)

     async def test_parity_start_requires_correct(self, _mock_graphql: AsyncMock) -> None:
         tool_fn = _make_tool()
         with pytest.raises(ToolError, match="correct is required"):
-            await tool_fn(action="parity_start")
+            await tool_fn(action="array", subaction="parity_start")
         _mock_graphql.assert_not_called()


@@ -48,9 +48,9 @@ class TestArrayActions:
     async def test_parity_start(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.return_value = {"parityCheck": {"start": True}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_start", correct=False)
+        result = await tool_fn(action="array", subaction="parity_start", correct=False)
         assert result["success"] is True
-        assert result["action"] == "parity_start"
+        assert result["subaction"] == "parity_start"
         _mock_graphql.assert_called_once()
         call_args = _mock_graphql.call_args
         assert call_args[0][1] == {"correct": False}
@@ -58,7 +58,7 @@ class TestArrayActions:
     async def test_parity_start_with_correct(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.return_value = {"parityCheck": {"start": True}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_start", correct=True)
+        result = await tool_fn(action="array", subaction="parity_start", correct=True)
         assert result["success"] is True
         call_args = _mock_graphql.call_args
         assert call_args[0][1] == {"correct": True}
@@ -66,32 +66,32 @@ class TestArrayActions:
     async def test_parity_status(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.return_value = {"array": {"parityCheckStatus": {"progress": 50}}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_status")
+        result = await tool_fn(action="array", subaction="parity_status")
         assert result["success"] is True

     async def test_parity_pause(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.return_value = {"parityCheck": {"pause": True}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_pause")
+        result = await tool_fn(action="array", subaction="parity_pause")
         assert result["success"] is True

     async def test_parity_resume(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.return_value = {"parityCheck": {"resume": True}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_resume")
+        result = await tool_fn(action="array", subaction="parity_resume")
         assert result["success"] is True

     async def test_parity_cancel(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.return_value = {"parityCheck": {"cancel": True}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_cancel")
+        result = await tool_fn(action="array", subaction="parity_cancel")
         assert result["success"] is True

     async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.side_effect = RuntimeError("disk error")
         tool_fn = _make_tool()
         with pytest.raises(ToolError, match="Failed to execute array/parity_status"):
-            await tool_fn(action="parity_status")
+            await tool_fn(action="array", subaction="parity_status")


 class TestArrayMutationFailures:
@@ -100,14 +100,14 @@ class TestArrayMutationFailures:
     async def test_parity_start_mutation_returns_false(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.return_value = {"parityCheck": {"start": False}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_start", correct=False)
+        result = await tool_fn(action="array", subaction="parity_start", correct=False)
         assert result["success"] is True
         assert result["data"] == {"parityCheck": {"start": False}}

     async def test_parity_start_mutation_returns_null(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.return_value = {"parityCheck": {"start": None}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_start", correct=False)
+        result = await tool_fn(action="array", subaction="parity_start", correct=False)
         assert result["success"] is True
         assert result["data"] == {"parityCheck": {"start": None}}

@@ -116,7 +116,7 @@ class TestArrayMutationFailures:
     ) -> None:
         _mock_graphql.return_value = {"parityCheck": {"start": {}}}
         tool_fn = _make_tool()
-        result = await tool_fn(action="parity_start", correct=False)
+        result = await tool_fn(action="array", subaction="parity_start", correct=False)
         assert result["success"] is True
         assert result["data"] == {"parityCheck": {"start": {}}}

@@ -124,7 +124,7 @@ class TestArrayMutationFailures:
         _mock_graphql.side_effect = TimeoutError("operation timed out")
         tool_fn = _make_tool()
         with pytest.raises(ToolError, match="timed out"):
-            await tool_fn(action="parity_cancel")
+            await tool_fn(action="array", subaction="parity_cancel")


 class TestArrayNetworkErrors:
@@ -134,13 +134,13 @@ class TestArrayNetworkErrors:
         _mock_graphql.side_effect = ToolError("HTTP error 500: Internal Server Error")
         tool_fn = _make_tool()
         with pytest.raises(ToolError, match="HTTP error 500"):
-            await tool_fn(action="parity_start", correct=False)
+            await tool_fn(action="array", subaction="parity_start", correct=False)

     async def test_connection_refused(self, _mock_graphql: AsyncMock) -> None:
         _mock_graphql.side_effect = ToolError("Network connection error: Connection refused")
         tool_fn = _make_tool()
         with pytest.raises(ToolError, match="Network connection error"):
-            await tool_fn(action="parity_status")
+            await tool_fn(action="array", subaction="parity_status")


 # ---------------------------------------------------------------------------
@@ -156,7 +156,7 @@ async def test_parity_history_returns_history(_mock_graphql):
     _mock_graphql.return_value = {
         "parityHistory": [{"date": "2026-03-01T00:00:00Z", "status": "COMPLETED", "errors": 0}]
     }
-    result = await _make_tool()(action="parity_history")
+    result = await _make_tool()(action="array", subaction="parity_history")
     assert result["success"] is True
     assert len(result["data"]["parityHistory"]) == 1

@@ -167,20 +167,20 @@ async def test_parity_history_returns_history(_mock_graphql):
 @pytest.mark.asyncio
 async def test_start_array(_mock_graphql):
     _mock_graphql.return_value = {"array": {"setState": {"state": "STARTED"}}}
-    result = await _make_tool()(action="start_array")
+    result = await _make_tool()(action="array", subaction="start_array")
     assert result["success"] is True


 @pytest.mark.asyncio
 async def test_stop_array_requires_confirm(_mock_graphql):
     with pytest.raises(ToolError, match="not confirmed"):
-        await _make_tool()(action="stop_array", confirm=False)
+        await _make_tool()(action="array", subaction="stop_array", confirm=False)


 @pytest.mark.asyncio
 async def test_stop_array_with_confirm(_mock_graphql):
     _mock_graphql.return_value = {"array": {"setState": {"state": "STOPPED"}}}
-    result = await _make_tool()(action="stop_array", confirm=True)
+    result = await _make_tool()(action="array", subaction="stop_array", confirm=True)
     assert result["success"] is True


@@ -190,13 +190,13 @@ async def test_stop_array_with_confirm(_mock_graphql):
 @pytest.mark.asyncio
 async def test_add_disk_requires_disk_id(_mock_graphql):
     with pytest.raises(ToolError, match="disk_id"):
-        await _make_tool()(action="add_disk")
+        await _make_tool()(action="array", subaction="add_disk")


 @pytest.mark.asyncio
 async def test_add_disk_success(_mock_graphql):
     _mock_graphql.return_value = {"array": {"addDiskToArray": {"state": "STARTED"}}}
-    result = await _make_tool()(action="add_disk", disk_id="abc123:local")
+    result = await _make_tool()(action="array", subaction="add_disk", disk_id="abc123:local")
     assert result["success"] is True


@@ -206,13 +206,17 @@ async def test_add_disk_success(_mock_graphql):
 @pytest.mark.asyncio
 async def test_remove_disk_requires_confirm(_mock_graphql):
     with pytest.raises(ToolError, match="not confirmed"):
-        await _make_tool()(action="remove_disk", disk_id="abc123:local", confirm=False)
+        await _make_tool()(
+            action="array", subaction="remove_disk", disk_id="abc123:local", confirm=False
+        )


 @pytest.mark.asyncio
 async def test_remove_disk_with_confirm(_mock_graphql):
     _mock_graphql.return_value = {"array": {"removeDiskFromArray": {"state": "STOPPED"}}}
-    result = await _make_tool()(action="remove_disk", disk_id="abc123:local", confirm=True)
+    result = await _make_tool()(
+        action="array", subaction="remove_disk", disk_id="abc123:local", confirm=True
+    )
     assert result["success"] is True


@@ -222,13 +226,13 @@ async def test_remove_disk_with_confirm(_mock_graphql):
 @pytest.mark.asyncio
 async def test_mount_disk_requires_disk_id(_mock_graphql):
     with pytest.raises(ToolError, match="disk_id"):
-        await _make_tool()(action="mount_disk")
+        await _make_tool()(action="array", subaction="mount_disk")


 @pytest.mark.asyncio
 async def test_unmount_disk_success(_mock_graphql):
     _mock_graphql.return_value = {"array": {"unmountArrayDisk": {"id": "abc123:local"}}}
-    result = await _make_tool()(action="unmount_disk", disk_id="abc123:local")
+    result = await _make_tool()(action="array", subaction="unmount_disk", disk_id="abc123:local")
     assert result["success"] is True


@@ -238,11 +242,15 @@ async def test_unmount_disk_success(_mock_graphql):
 @pytest.mark.asyncio
 async def test_clear_disk_stats_requires_confirm(_mock_graphql):
     with pytest.raises(ToolError, match="not confirmed"):
-        await _make_tool()(action="clear_disk_stats", disk_id="abc123:local", confirm=False)
+        await _make_tool()(
+            action="array", subaction="clear_disk_stats", disk_id="abc123:local", confirm=False
+        )


 @pytest.mark.asyncio
 async def test_clear_disk_stats_with_confirm(_mock_graphql):
     _mock_graphql.return_value = {"array": {"clearArrayDiskStatistics": True}}
-    result = await _make_tool()(action="clear_disk_stats", disk_id="abc123:local", confirm=True)
+    result = await _make_tool()(
+        action="array", subaction="clear_disk_stats", disk_id="abc123:local", confirm=True
+    )
     assert result["success"] is True
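Taken together, the updated tests pin down the new call convention. A hedged before/after, using only the call shapes from this file (the `unraid` callable here is whatever `register_unraid_tool` registers; tool wiring is elided):

    # Before (0.6.x): one tool per domain.
    result = await unraid_array(action="parity_start", correct=False)

    # After (1.0.0): one tool; the domain moves into the first argument.
    result = await unraid(action="array", subaction="parity_start", correct=False)
    assert result["success"] is True
    assert result["subaction"] == "parity_start"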