mirror of
https://github.com/jmagar/unraid-mcp.git
synced 2026-03-23 12:39:24 -07:00
BREAKING CHANGE: Replaces 15 separate MCP tools (unraid_info, unraid_array, unraid_storage, unraid_docker, unraid_vm, unraid_notifications, unraid_rclone, unraid_users, unraid_keys, unraid_health, unraid_settings, unraid_customization, unraid_plugins, unraid_oidc, unraid_live) with a single `unraid` tool using action (domain) + subaction (operation) routing. New interface: unraid(action="system", subaction="overview") replaces unraid_info(action="overview"). All 15 domains and ~108 subactions preserved. - Add unraid_mcp/tools/unraid.py (1891 lines, all domains consolidated) - Remove 15 individual tool files - Update tools/__init__.py to register single unified tool - Update server.py for new tool registration pattern - Update subscriptions/manager.py and resources.py for new tool names - Update all 25 test files + integration/contract/safety/schema/property tests - Update mcporter smoke-test script for new tool interface - Bump version 0.6.0 → 1.0.0 Co-authored-by: Claude <noreply@anthropic.com>
327 lines
13 KiB
Python
"""Tests for notification subactions of the consolidated unraid tool."""
|
|
|
|
from collections.abc import Generator
|
|
from unittest.mock import AsyncMock, patch
|
|
|
|
import pytest
|
|
from conftest import make_tool_fn
|
|
|
|
from unraid_mcp.core.exceptions import ToolError
|
|
|
|
|
|
@pytest.fixture
def _mock_graphql() -> Generator[AsyncMock, None, None]:
    """Patch the unraid tool's GraphQL transport with an AsyncMock for the test."""
    target = "unraid_mcp.tools.unraid.make_graphql_request"
    with patch(target, new_callable=AsyncMock) as mocked:
        yield mocked
|
|
|
|
|
|
def _make_tool():
    """Resolve the consolidated `unraid` tool function for direct invocation."""
    module_path = "unraid_mcp.tools.unraid"
    register_fn = "register_unraid_tool"
    tool_name = "unraid"
    return make_tool_fn(module_path, register_fn, tool_name)
|
|
|
|
|
|
class TestNotificationsValidation:
    """Guard-clause checks for destructive or parameterised notification subactions."""

    async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Deleting a single notification without confirm=True must be rejected."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="not confirmed"):
            await unraid(
                action="notification",
                subaction="delete",
                notification_id="n:1",
                notification_type="UNREAD",
            )

    async def test_delete_archived_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Bulk-deleting archived notifications also requires explicit confirmation."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="not confirmed"):
            await unraid(action="notification", subaction="delete_archived")

    async def test_create_requires_fields(self, _mock_graphql: AsyncMock) -> None:
        """Create with no fields at all should complain about the missing title."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="requires title"):
            await unraid(action="notification", subaction="create")

    async def test_archive_requires_id(self, _mock_graphql: AsyncMock) -> None:
        """Archiving a single notification needs a notification_id."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="notification_id"):
            await unraid(action="notification", subaction="archive")

    async def test_delete_requires_id_and_type(self, _mock_graphql: AsyncMock) -> None:
        """Even a confirmed delete is invalid without the target id/type."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="requires notification_id"):
            await unraid(action="notification", subaction="delete", confirm=True)
|
|
|
|
|
|
class TestNotificationsActions:
    """Happy-path behaviour for each notification subaction against a mocked GraphQL layer."""

    async def test_overview(self, _mock_graphql: AsyncMock) -> None:
        """Overview should surface the unread/archive counters from the API."""
        overview_payload = {
            "unread": {"info": 5, "warning": 2, "alert": 0, "total": 7},
            "archive": {"info": 10, "warning": 1, "alert": 0, "total": 11},
        }
        _mock_graphql.return_value = {"notifications": {"overview": overview_payload}}
        unraid = _make_tool()
        response = await unraid(action="notification", subaction="overview")
        assert response["unread"]["total"] == 7

    async def test_list(self, _mock_graphql: AsyncMock) -> None:
        """List should return the notifications array from the API response."""
        items = [{"id": "n:1", "title": "Test", "importance": "INFO"}]
        _mock_graphql.return_value = {"notifications": {"list": items}}
        unraid = _make_tool()
        response = await unraid(action="notification", subaction="list")
        assert len(response["notifications"]) == 1

    async def test_create(self, _mock_graphql: AsyncMock) -> None:
        """Create with all required fields reports success."""
        _mock_graphql.return_value = {
            "createNotification": {"id": "n:new", "title": "Test", "importance": "INFO"}
        }
        unraid = _make_tool()
        response = await unraid(
            action="notification",
            subaction="create",
            title="Test",
            subject="Test Subject",
            description="Test Desc",
            importance="info",
        )
        assert response["success"] is True

    async def test_archive_notification(self, _mock_graphql: AsyncMock) -> None:
        """Archiving a single notification by id reports success."""
        _mock_graphql.return_value = {"archiveNotification": {"id": "n:1"}}
        unraid = _make_tool()
        response = await unraid(
            action="notification", subaction="archive", notification_id="n:1"
        )
        assert response["success"] is True

    async def test_delete_with_confirm(self, _mock_graphql: AsyncMock) -> None:
        """A confirmed delete with id + type goes through and reports success."""
        _mock_graphql.return_value = {
            "deleteNotification": {
                "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0},
                "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0},
            }
        }
        unraid = _make_tool()
        response = await unraid(
            action="notification",
            subaction="delete",
            notification_id="n:1",
            notification_type="unread",
            confirm=True,
        )
        assert response["success"] is True

    async def test_archive_all(self, _mock_graphql: AsyncMock) -> None:
        """archive_all requires no parameters and reports success."""
        _mock_graphql.return_value = {
            "archiveAll": {
                "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0},
                "archive": {"info": 0, "warning": 0, "alert": 0, "total": 1},
            }
        }
        unraid = _make_tool()
        response = await unraid(action="notification", subaction="archive_all")
        assert response["success"] is True

    async def test_unread_notification(self, _mock_graphql: AsyncMock) -> None:
        """Marking a notification unread echoes the subaction in the result."""
        _mock_graphql.return_value = {"unreadNotification": {"id": "n:1"}}
        unraid = _make_tool()
        response = await unraid(
            action="notification", subaction="unread", notification_id="n:1"
        )
        assert response["success"] is True
        assert response["subaction"] == "unread"

    async def test_list_with_importance_filter(self, _mock_graphql: AsyncMock) -> None:
        """Filter params are uppercased/forwarded in the GraphQL filter variable."""
        items = [{"id": "n:1", "title": "Alert", "importance": "WARNING"}]
        _mock_graphql.return_value = {"notifications": {"list": items}}
        unraid = _make_tool()
        response = await unraid(
            action="notification", subaction="list", importance="warning", limit=10, offset=5
        )
        assert len(response["notifications"]) == 1
        # Second positional arg of make_graphql_request is the variables dict.
        sent_filter = _mock_graphql.call_args[0][1]["filter"]
        assert sent_filter["importance"] == "WARNING"
        assert sent_filter["limit"] == 10
        assert sent_filter["offset"] == 5

    async def test_delete_archived(self, _mock_graphql: AsyncMock) -> None:
        """Confirmed delete_archived succeeds and echoes the subaction."""
        _mock_graphql.return_value = {
            "deleteArchivedNotifications": {
                "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0},
                "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0},
            }
        }
        unraid = _make_tool()
        response = await unraid(
            action="notification", subaction="delete_archived", confirm=True
        )
        assert response["success"] is True
        assert response["subaction"] == "delete_archived"

    async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
        """Unexpected transport errors are wrapped in a ToolError with context."""
        _mock_graphql.side_effect = RuntimeError("boom")
        unraid = _make_tool()
        with pytest.raises(ToolError, match="Failed to execute notification/overview"):
            await unraid(action="notification", subaction="overview")
|
|
|
|
|
|
class TestNotificationsCreateValidation:
    """Importance-enum and field-length validation for notification/create."""

    async def test_invalid_importance_rejected(self, _mock_graphql: AsyncMock) -> None:
        """An arbitrary importance string is rejected before any GraphQL call."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="Invalid importance"):
            await unraid(
                action="notification",
                subaction="create",
                title="T",
                subject="S",
                description="D",
                importance="invalid",
            )

    async def test_normal_importance_rejected(self, _mock_graphql: AsyncMock) -> None:
        """NORMAL is not a valid GraphQL NotificationImportance value (INFO/WARNING/ALERT are)."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="Invalid importance"):
            await unraid(
                action="notification",
                subaction="create",
                title="T",
                subject="S",
                description="D",
                importance="normal",
            )

    async def test_alert_importance_accepted(self, _mock_graphql: AsyncMock) -> None:
        """Lowercase 'alert' is a valid importance and the create succeeds."""
        _mock_graphql.return_value = {
            "createNotification": {"id": "n:1", "importance": "ALERT"}
        }
        unraid = _make_tool()
        response = await unraid(
            action="notification",
            subaction="create",
            title="T",
            subject="S",
            description="D",
            importance="alert",
        )
        assert response["success"] is True

    async def test_title_too_long_rejected(self, _mock_graphql: AsyncMock) -> None:
        """Titles longer than 200 characters are rejected."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="title must be at most 200"):
            await unraid(
                action="notification",
                subaction="create",
                title="x" * 201,
                subject="S",
                description="D",
                importance="info",
            )

    async def test_subject_too_long_rejected(self, _mock_graphql: AsyncMock) -> None:
        """Subjects longer than 500 characters are rejected."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="subject must be at most 500"):
            await unraid(
                action="notification",
                subaction="create",
                title="T",
                subject="x" * 501,
                description="D",
                importance="info",
            )

    async def test_description_too_long_rejected(self, _mock_graphql: AsyncMock) -> None:
        """Descriptions longer than 2000 characters are rejected."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="description must be at most 2000"):
            await unraid(
                action="notification",
                subaction="create",
                title="T",
                subject="S",
                description="x" * 2001,
                importance="info",
            )

    async def test_title_at_max_accepted(self, _mock_graphql: AsyncMock) -> None:
        """A title of exactly 200 characters is on the boundary and accepted."""
        _mock_graphql.return_value = {
            "createNotification": {"id": "n:1", "importance": "INFO"}
        }
        unraid = _make_tool()
        response = await unraid(
            action="notification",
            subaction="create",
            title="x" * 200,
            subject="S",
            description="D",
            importance="info",
        )
        assert response["success"] is True
|
|
|
|
|
|
class TestNewNotificationMutations:
    """Batch archive/unarchive and overview-recalculation subactions."""

    async def test_archive_many_success(self, _mock_graphql: AsyncMock) -> None:
        """archive_many forwards the id list as the GraphQL `ids` variable."""
        _mock_graphql.return_value = {
            "archiveNotifications": {
                "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0},
                "archive": {"info": 2, "warning": 0, "alert": 0, "total": 2},
            }
        }
        unraid = _make_tool()
        response = await unraid(
            action="notification", subaction="archive_many", notification_ids=["n:1", "n:2"]
        )
        assert response["success"] is True
        # Variables dict is the second positional argument to the GraphQL call.
        assert _mock_graphql.call_args[0][1] == {"ids": ["n:1", "n:2"]}

    async def test_archive_many_requires_ids(self, _mock_graphql: AsyncMock) -> None:
        """archive_many without notification_ids is a validation error."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="notification_ids"):
            await unraid(action="notification", subaction="archive_many")

    async def test_unarchive_many_success(self, _mock_graphql: AsyncMock) -> None:
        """unarchive_many succeeds for a valid id list."""
        _mock_graphql.return_value = {
            "unarchiveNotifications": {
                "unread": {"info": 2, "warning": 0, "alert": 0, "total": 2},
                "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0},
            }
        }
        unraid = _make_tool()
        response = await unraid(
            action="notification", subaction="unarchive_many", notification_ids=["n:1", "n:2"]
        )
        assert response["success"] is True

    async def test_unarchive_many_requires_ids(self, _mock_graphql: AsyncMock) -> None:
        """unarchive_many without notification_ids is a validation error."""
        unraid = _make_tool()
        with pytest.raises(ToolError, match="notification_ids"):
            await unraid(action="notification", subaction="unarchive_many")

    async def test_unarchive_all_success(self, _mock_graphql: AsyncMock) -> None:
        """unarchive_all with no filter succeeds."""
        _mock_graphql.return_value = {
            "unarchiveAll": {
                "unread": {"info": 5, "warning": 1, "alert": 0, "total": 6},
                "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0},
            }
        }
        unraid = _make_tool()
        response = await unraid(action="notification", subaction="unarchive_all")
        assert response["success"] is True

    async def test_unarchive_all_with_importance(self, _mock_graphql: AsyncMock) -> None:
        """Lowercase importance input must be uppercased before being sent to GraphQL."""
        _mock_graphql.return_value = {
            "unarchiveAll": {"unread": {"total": 1}, "archive": {"total": 0}}
        }
        unraid = _make_tool()
        await unraid(action="notification", subaction="unarchive_all", importance="warning")
        assert _mock_graphql.call_args[0][1] == {"importance": "WARNING"}

    async def test_recalculate_success(self, _mock_graphql: AsyncMock) -> None:
        """recalculate triggers the overview recomputation and reports success."""
        _mock_graphql.return_value = {
            "recalculateOverview": {
                "unread": {"info": 3, "warning": 1, "alert": 0, "total": 4},
                "archive": {"info": 10, "warning": 0, "alert": 0, "total": 10},
            }
        }
        unraid = _make_tool()
        response = await unraid(action="notification", subaction="recalculate")
        assert response["success"] is True
|