Mirror of https://github.com/jmagar/unraid-mcp.git, synced 2026-03-02 00:04:45 -08:00.
Resolves review threads:
- PRRT_kwDOO6Hdxs5vNroH (Thread 36): tests now verify generic ToolError message
instead of raw exception text (security: no sensitive data in user-facing errors)
- PRRT_kwDOO6Hdxs5vNuYg (Thread 14): format_kb KB branch now uses :.2f like all
other branches (consistency fix)
- I001/F841/PERF401: fix ruff violations in http_layer, integration, safety tests
Changes:
- tests/test_array.py: match "Failed to execute array/parity_status" (not raw error)
- tests/test_keys.py: match "Failed to execute keys/list" (not raw error)
- tests/test_notifications.py: match "Failed to execute notifications/overview" (not raw error)
- tests/test_storage.py: update format_kb assertion to "512.00 KB" (:.2f format)
- tests/http_layer/test_request_construction.py: remove unused result var (F841)
+ fix import sort (I001)
- tests/safety/test_destructive_guards.py: use list.extend (PERF401) + fix import sort
- unraid_mcp/core/utils.py: format_kb returns f"{k:.2f} KB" for sub-MB values
Co-authored-by: @coderabbitai
Co-authored-by: @cubic-dev-ai
Co-authored-by: @copilot-pull-request-reviewer
238 lines · 9.1 KiB · Python
"""Tests for unraid_notifications tool."""
|
|
|
|
from collections.abc import Generator
|
|
from unittest.mock import AsyncMock, patch
|
|
|
|
import pytest
|
|
from conftest import make_tool_fn
|
|
|
|
from unraid_mcp.core.exceptions import ToolError
|
|
|
|
|
|
@pytest.fixture
def _mock_graphql() -> Generator[AsyncMock, None, None]:
    """Replace the notifications module's GraphQL call with an AsyncMock.

    Yields the mock so tests can set ``return_value``/``side_effect`` and
    inspect ``call_args`` after invoking the tool.
    """
    target = "unraid_mcp.tools.notifications.make_graphql_request"
    with patch(target, new_callable=AsyncMock) as mocked:
        yield mocked
|
|
|
|
|
|
def _make_tool():
    """Build and return the ``unraid_notifications`` tool callable.

    Delegates to the shared ``make_tool_fn`` conftest helper, which registers
    the tool from its module and extracts the underlying async function.
    """
    module_path = "unraid_mcp.tools.notifications"
    register_fn = "register_notifications_tool"
    tool_name = "unraid_notifications"
    return make_tool_fn(module_path, register_fn, tool_name)
|
|
|
|
|
|
class TestNotificationsValidation:
    """Invalid inputs must raise ToolError before any GraphQL request is made."""

    async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Deleting a single notification without confirm=True is rejected."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await fn(action="delete", notification_id="n:1", notification_type="UNREAD")

    async def test_delete_archived_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Bulk-deleting archived notifications without confirm=True is rejected."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await fn(action="delete_archived")

    async def test_create_requires_fields(self, _mock_graphql: AsyncMock) -> None:
        """Creating a notification with no title/subject/description is rejected."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="requires title"):
            await fn(action="create")

    async def test_archive_requires_id(self, _mock_graphql: AsyncMock) -> None:
        """Archiving without a notification_id is rejected."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="notification_id"):
            await fn(action="archive")

    async def test_delete_requires_id_and_type(self, _mock_graphql: AsyncMock) -> None:
        """Even with confirm=True, delete still needs notification_id and type."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="requires notification_id"):
            await fn(action="delete", confirm=True)
|
|
|
|
|
|
class TestNotificationsActions:
    """Happy-path behavior of each supported notifications action."""

    async def test_overview(self, _mock_graphql: AsyncMock) -> None:
        """Overview response exposes unread/archive counters from GraphQL."""
        overview = {
            "unread": {"info": 5, "warning": 2, "alert": 0, "total": 7},
            "archive": {"info": 10, "warning": 1, "alert": 0, "total": 11},
        }
        _mock_graphql.return_value = {"notifications": {"overview": overview}}
        fn = _make_tool()
        out = await fn(action="overview")
        assert out["unread"]["total"] == 7

    async def test_list(self, _mock_graphql: AsyncMock) -> None:
        """List action returns the notifications array from GraphQL."""
        rows = [{"id": "n:1", "title": "Test", "importance": "INFO"}]
        _mock_graphql.return_value = {"notifications": {"list": rows}}
        fn = _make_tool()
        out = await fn(action="list")
        assert len(out["notifications"]) == 1

    async def test_warnings(self, _mock_graphql: AsyncMock) -> None:
        """Warnings action surfaces warningsAndAlerts as ``warnings``."""
        rows = [{"id": "n:1", "importance": "WARNING"}]
        _mock_graphql.return_value = {"notifications": {"warningsAndAlerts": rows}}
        fn = _make_tool()
        out = await fn(action="warnings")
        assert len(out["warnings"]) == 1

    async def test_create(self, _mock_graphql: AsyncMock) -> None:
        """Create with all required fields reports success."""
        created = {"id": "n:new", "title": "Test", "importance": "INFO"}
        _mock_graphql.return_value = {"notifications": {"createNotification": created}}
        fn = _make_tool()
        out = await fn(
            action="create",
            title="Test",
            subject="Test Subject",
            description="Test Desc",
            importance="normal",
        )
        assert out["success"] is True

    async def test_archive_notification(self, _mock_graphql: AsyncMock) -> None:
        """Archiving a single notification by id reports success."""
        _mock_graphql.return_value = {"notifications": {"archiveNotification": True}}
        fn = _make_tool()
        out = await fn(action="archive", notification_id="n:1")
        assert out["success"] is True

    async def test_delete_with_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Delete succeeds when id, type, and confirm=True are all provided."""
        _mock_graphql.return_value = {"notifications": {"deleteNotification": True}}
        fn = _make_tool()
        out = await fn(
            action="delete",
            notification_id="n:1",
            notification_type="unread",
            confirm=True,
        )
        assert out["success"] is True

    async def test_archive_all(self, _mock_graphql: AsyncMock) -> None:
        """archive_all reports success without needing an id."""
        _mock_graphql.return_value = {"notifications": {"archiveAll": True}}
        fn = _make_tool()
        out = await fn(action="archive_all")
        assert out["success"] is True

    async def test_unread_notification(self, _mock_graphql: AsyncMock) -> None:
        """Marking a notification unread reports success and echoes the action."""
        _mock_graphql.return_value = {"notifications": {"unreadNotification": True}}
        fn = _make_tool()
        out = await fn(action="unread", notification_id="n:1")
        assert out["success"] is True
        assert out["action"] == "unread"

    async def test_list_with_importance_filter(self, _mock_graphql: AsyncMock) -> None:
        """List forwards importance/limit/offset into the GraphQL filter variable."""
        rows = [{"id": "n:1", "title": "Alert", "importance": "WARNING"}]
        _mock_graphql.return_value = {"notifications": {"list": rows}}
        fn = _make_tool()
        out = await fn(action="list", importance="warning", limit=10, offset=5)
        assert len(out["notifications"]) == 1
        # Second positional arg of make_graphql_request is the variables dict.
        sent_filter = _mock_graphql.call_args[0][1]["filter"]
        assert sent_filter["importance"] == "WARNING"
        assert sent_filter["limit"] == 10
        assert sent_filter["offset"] == 5

    async def test_delete_archived(self, _mock_graphql: AsyncMock) -> None:
        """delete_archived with confirm=True reports success and echoes the action."""
        _mock_graphql.return_value = {"notifications": {"deleteArchivedNotifications": True}}
        fn = _make_tool()
        out = await fn(action="delete_archived", confirm=True)
        assert out["success"] is True
        assert out["action"] == "delete_archived"

    async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
        """Unexpected errors are wrapped in a generic ToolError (no raw text leaked)."""
        _mock_graphql.side_effect = RuntimeError("boom")
        fn = _make_tool()
        with pytest.raises(ToolError, match="Failed to execute notifications/overview"):
            await fn(action="overview")
|
|
|
|
|
|
class TestNotificationsCreateValidation:
    """Importance enum and field-length validation for the create action."""

    async def test_invalid_importance_rejected(self, _mock_graphql: AsyncMock) -> None:
        """An unknown importance value is rejected by the enum validator."""
        fn = _make_tool()
        fields = {"title": "T", "subject": "S", "description": "D", "importance": "invalid"}
        with pytest.raises(ToolError, match="importance must be one of"):
            await fn(action="create", **fields)

    async def test_info_importance_rejected(self, _mock_graphql: AsyncMock) -> None:
        """INFO is listed in old docstring examples but rejected by the validator."""
        fn = _make_tool()
        fields = {"title": "T", "subject": "S", "description": "D", "importance": "info"}
        with pytest.raises(ToolError, match="importance must be one of"):
            await fn(action="create", **fields)

    async def test_alert_importance_accepted(self, _mock_graphql: AsyncMock) -> None:
        """'alert' is a valid importance and the create succeeds."""
        created = {"id": "n:1", "importance": "ALERT"}
        _mock_graphql.return_value = {"notifications": {"createNotification": created}}
        fn = _make_tool()
        out = await fn(
            action="create", title="T", subject="S", description="D", importance="alert"
        )
        assert out["success"] is True

    async def test_title_too_long_rejected(self, _mock_graphql: AsyncMock) -> None:
        """A 201-character title exceeds the 200-character limit."""
        fn = _make_tool()
        fields = {
            "title": "x" * 201,
            "subject": "S",
            "description": "D",
            "importance": "normal",
        }
        with pytest.raises(ToolError, match="title must be at most 200"):
            await fn(action="create", **fields)

    async def test_subject_too_long_rejected(self, _mock_graphql: AsyncMock) -> None:
        """A 501-character subject exceeds the 500-character limit."""
        fn = _make_tool()
        fields = {
            "title": "T",
            "subject": "x" * 501,
            "description": "D",
            "importance": "normal",
        }
        with pytest.raises(ToolError, match="subject must be at most 500"):
            await fn(action="create", **fields)

    async def test_description_too_long_rejected(self, _mock_graphql: AsyncMock) -> None:
        """A 2001-character description exceeds the 2000-character limit."""
        fn = _make_tool()
        fields = {
            "title": "T",
            "subject": "S",
            "description": "x" * 2001,
            "importance": "normal",
        }
        with pytest.raises(ToolError, match="description must be at most 2000"):
            await fn(action="create", **fields)

    async def test_title_at_max_accepted(self, _mock_graphql: AsyncMock) -> None:
        """A title exactly at the 200-character boundary is accepted."""
        created = {"id": "n:1", "importance": "NORMAL"}
        _mock_graphql.return_value = {"notifications": {"createNotification": created}}
        fn = _make_tool()
        out = await fn(
            action="create",
            title="x" * 200,
            subject="S",
            description="D",
            importance="normal",
        )
        assert out["success"] is True