forked from HomeLab/unraid-mcp
Addresses all critical, high, medium, and low issues from full codebase review. 494 tests pass, ruff clean, ty type-check clean. Security: - Add tool_error_handler context manager (exceptions.py) — standardised error handling, eliminates 11 bare except-reraise patterns - Remove unused exception subclasses (ConfigurationError, UnraidAPIError, SubscriptionError, ValidationError, IdempotentOperationError) - Harden GraphQL subscription query validator with allow-list and forbidden-keyword regex (diagnostics.py) - Add input validation for rclone create_remote config_data: injection, path-traversal, and key-count limits (rclone.py) - Validate notifications importance enum before GraphQL request (notifications.py) - Sanitise HTTP/network/JSON error messages — no raw exception strings leaked to clients (client.py) - Strip path/creds from displayed API URL via _safe_display_url (health.py) - Enable Ruff S (bandit) rule category in pyproject.toml - Harden container mutations to strict-only matching — no fuzzy/substring for destructive operations (docker.py) Performance: - Token-bucket rate limiter (90 tokens, 9 req/s) with 429 retry backoff (client.py) - Lazy asyncio.Lock init via _get_client_lock() — fixes event-loop module-load crash (client.py) - Double-checked locking in get_http_client() for fast-path (client.py) - Short hex container ID fast-path skips list fetch (docker.py) - Cap resource_data log content to 1 MB / 5,000 lines (manager.py) - Reset reconnect counter after 30 s stable connection (manager.py) - Move tail_lines validation to module level; enforce 10,000 line cap (storage.py, docker.py) - force_terminal=True removed from logging RichHandler (logging.py) Architecture: - Register diagnostic tools in server startup (server.py) - Move ALL_ACTIONS computation to module level in all tools - Consolidate format_kb / format_bytes into shared core/utils.py - Add _safe_get() helper in core/utils.py for nested dict traversal - Extract _analyze_subscription_status() 
from health.py diagnose handler - Validate required config at startup — fail fast with CRITICAL log (server.py) Code quality: - Remove ~90 lines of dead Rich formatting helpers from logging.py - Remove dead self.websocket attribute from SubscriptionManager - Remove dead setup_uvicorn_logging() wrapper - Move _VALID_IMPORTANCE to module level (N806 fix) - Add slots=True to all three dataclasses (SubscriptionData, SystemHealth, APIResponse) - Fix None rendering as literal "None" string in info.py summaries - Change fuzzy-match log messages from INFO to DEBUG (docker.py) - UTC-aware datetimes throughout (manager.py, diagnostics.py) Infrastructure: - Upgrade base image python:3.11-slim → python:3.12-slim (Dockerfile) - Add non-root appuser (UID/GID 1000) with HEALTHCHECK (Dockerfile) - Add read_only, cap_drop: ALL, tmpfs /tmp to docker-compose.yml - Single-source version via importlib.metadata (pyproject.toml → __init__.py) - Add open_timeout to all websockets.connect() calls Tests: - Update error message matchers to match sanitised messages (test_client.py) - Fix patch targets for UNRAID_API_URL → utils module (test_subscriptions.py) - Fix importance="info" → importance="normal" (test_notifications.py, http_layer) - Fix naive datetime fixtures → UTC-aware (test_subscriptions.py) Co-authored-by: Claude <claude@anthropic.com>
154 lines
6.0 KiB
Python
154 lines
6.0 KiB
Python
"""Tests for unraid_notifications tool."""
|
|
|
|
from collections.abc import Generator
|
|
from unittest.mock import AsyncMock, patch
|
|
|
|
import pytest
|
|
from conftest import make_tool_fn
|
|
|
|
from unraid_mcp.core.exceptions import ToolError
|
|
|
|
|
|
@pytest.fixture
def _mock_graphql() -> Generator[AsyncMock, None, None]:
    """Replace make_graphql_request in the notifications module with an AsyncMock.

    Yields the mock so tests can set return values / side effects and inspect
    the GraphQL variables the tool passed through.
    """
    target = "unraid_mcp.tools.notifications.make_graphql_request"
    patcher = patch(target, new_callable=AsyncMock)
    mock = patcher.start()
    try:
        yield mock
    finally:
        # Ensure the patch is undone even if the test body raises.
        patcher.stop()
|
|
|
|
|
def _make_tool():
    """Build the unraid_notifications tool callable via the shared conftest helper."""
    module_path = "unraid_mcp.tools.notifications"
    registrar = "register_notifications_tool"
    return make_tool_fn(module_path, registrar, "unraid_notifications")
|
|
|
|
|
class TestNotificationsValidation:
    """Input-validation errors that must be raised before any GraphQL request."""

    async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Deleting a single notification without confirm=True is rejected."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await fn(action="delete", notification_id="n:1", notification_type="UNREAD")

    async def test_delete_archived_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Bulk delete of archived notifications also requires confirm=True."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="destructive"):
            await fn(action="delete_archived")

    async def test_create_requires_fields(self, _mock_graphql: AsyncMock) -> None:
        """Create with no title/subject/description fails fast."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="requires title"):
            await fn(action="create")

    async def test_archive_requires_id(self, _mock_graphql: AsyncMock) -> None:
        """Archive without a notification_id is rejected."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="notification_id"):
            await fn(action="archive")

    async def test_delete_requires_id_and_type(self, _mock_graphql: AsyncMock) -> None:
        """Even with confirm=True, delete needs notification_id and type."""
        fn = _make_tool()
        with pytest.raises(ToolError, match="requires notification_id"):
            await fn(action="delete", confirm=True)
|
|
|
class TestNotificationsActions:
    """Happy-path behaviour for each supported action, with GraphQL mocked out."""

    async def test_overview(self, _mock_graphql: AsyncMock) -> None:
        """Overview action surfaces the unread/archive counters from GraphQL."""
        overview = {
            "unread": {"info": 5, "warning": 2, "alert": 0, "total": 7},
            "archive": {"info": 10, "warning": 1, "alert": 0, "total": 11},
        }
        _mock_graphql.return_value = {"notifications": {"overview": overview}}
        fn = _make_tool()
        result = await fn(action="overview")
        assert result["unread"]["total"] == 7

    async def test_list(self, _mock_graphql: AsyncMock) -> None:
        """List action returns the notifications array from the response."""
        items = [{"id": "n:1", "title": "Test", "importance": "INFO"}]
        _mock_graphql.return_value = {"notifications": {"list": items}}
        fn = _make_tool()
        result = await fn(action="list")
        assert len(result["notifications"]) == 1

    async def test_warnings(self, _mock_graphql: AsyncMock) -> None:
        """Warnings action maps the warningsAndAlerts field through."""
        alerts = [{"id": "n:1", "importance": "WARNING"}]
        _mock_graphql.return_value = {"notifications": {"warningsAndAlerts": alerts}}
        fn = _make_tool()
        result = await fn(action="warnings")
        assert len(result["warnings"]) == 1

    async def test_create(self, _mock_graphql: AsyncMock) -> None:
        """Create with all required fields reports success."""
        created = {"id": "n:new", "title": "Test", "importance": "INFO"}
        _mock_graphql.return_value = {"notifications": {"createNotification": created}}
        fn = _make_tool()
        result = await fn(
            action="create",
            title="Test",
            subject="Test Subject",
            description="Test Desc",
            importance="normal",
        )
        assert result["success"] is True

    async def test_archive_notification(self, _mock_graphql: AsyncMock) -> None:
        """Archiving a single notification reports success."""
        _mock_graphql.return_value = {"notifications": {"archiveNotification": True}}
        fn = _make_tool()
        result = await fn(action="archive", notification_id="n:1")
        assert result["success"] is True

    async def test_delete_with_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Delete succeeds when id, type, and confirm=True are all supplied."""
        _mock_graphql.return_value = {"notifications": {"deleteNotification": True}}
        fn = _make_tool()
        result = await fn(
            action="delete",
            notification_id="n:1",
            notification_type="unread",
            confirm=True,
        )
        assert result["success"] is True

    async def test_archive_all(self, _mock_graphql: AsyncMock) -> None:
        """Archive-all succeeds without needing an id."""
        _mock_graphql.return_value = {"notifications": {"archiveAll": True}}
        fn = _make_tool()
        result = await fn(action="archive_all")
        assert result["success"] is True

    async def test_unread_notification(self, _mock_graphql: AsyncMock) -> None:
        """Marking a notification unread echoes the action in the result."""
        _mock_graphql.return_value = {"notifications": {"unreadNotification": True}}
        fn = _make_tool()
        result = await fn(action="unread", notification_id="n:1")
        assert result["success"] is True
        assert result["action"] == "unread"

    async def test_list_with_importance_filter(self, _mock_graphql: AsyncMock) -> None:
        """List forwards importance (upper-cased), limit, and offset in the filter var."""
        items = [{"id": "n:1", "title": "Alert", "importance": "WARNING"}]
        _mock_graphql.return_value = {"notifications": {"list": items}}
        fn = _make_tool()
        result = await fn(action="list", importance="warning", limit=10, offset=5)
        assert len(result["notifications"]) == 1
        # Inspect the positional GraphQL variables dict the tool passed through.
        sent_filter = _mock_graphql.call_args[0][1]["filter"]
        assert sent_filter["importance"] == "WARNING"
        assert sent_filter["limit"] == 10
        assert sent_filter["offset"] == 5

    async def test_delete_archived(self, _mock_graphql: AsyncMock) -> None:
        """Bulk delete of archived notifications succeeds with confirm=True."""
        _mock_graphql.return_value = {"notifications": {"deleteArchivedNotifications": True}}
        fn = _make_tool()
        result = await fn(action="delete_archived", confirm=True)
        assert result["success"] is True
        assert result["action"] == "delete_archived"

    async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
        """Unexpected exceptions from the GraphQL layer are wrapped in ToolError."""
        _mock_graphql.side_effect = RuntimeError("boom")
        fn = _make_tool()
        with pytest.raises(ToolError, match="boom"):
            await fn(action="overview")