feat: add 5 notification mutations + comprehensive refactors from PR review

New notification actions (archive_many, create_unique, unarchive_many,
unarchive_all, recalculate) bring unraid_notifications to 14 actions.

Also includes continuation of CodeRabbit/PR review fixes:
- Remove redundant try-except in virtualization.py (silent failure fix)
- Add QueryCache protocol with get/put/invalidate_all to core/client.py
- Refactor subscriptions (manager, diagnostics, resources, utils)
- Update config (logging, settings) for improved structure
- Expand test coverage: http_layer, safety guards, schema validation
- Minor cleanups: array, docker, health, keys tools

Co-authored-by: Claude <noreply@anthropic.com>
This commit is contained in:
Jacob Magar
2026-03-13 01:54:55 -04:00
parent 06f18f32fc
commit 60defc35ca
27 changed files with 2508 additions and 423 deletions

View File

@@ -1,14 +1,14 @@
"""MCP tools organized by functional domain.
10 consolidated tools with ~90 actions total:
10 consolidated tools with 81 actions total:
unraid_info - System information queries (19 actions)
unraid_array - Array operations and power management (12 actions)
unraid_array - Array operations and parity management (5 actions)
unraid_storage - Storage, disks, and logs (6 actions)
unraid_docker - Docker container management (15 actions)
unraid_vm - Virtual machine management (9 actions)
unraid_notifications - Notification management (14 actions)
unraid_rclone - Cloud storage remotes (4 actions)
unraid_users - User management (8 actions)
unraid_users - User management (1 action)
unraid_keys - API key management (5 actions)
unraid_health - Health monitoring and diagnostics (3 actions)
"""

View File

@@ -73,7 +73,7 @@ def register_array_tool(mcp: FastMCP) -> None:
"""Manage Unraid array parity checks.
Actions:
parity_start - Start parity check (optional correct=True to fix errors)
parity_start - Start parity check (correct=True to fix errors, correct=False for read-only; required)
parity_pause - Pause running parity check
parity_resume - Resume paused parity check
parity_cancel - Cancel running parity check

View File

@@ -233,8 +233,8 @@ async def _resolve_container_id(container_id: str, *, strict: bool = False) -> s
data = await make_graphql_request(list_query)
containers = safe_get(data, "docker", "containers", default=[])
# Short hex prefix: match by ID prefix before trying name matching
if _DOCKER_SHORT_ID_PATTERN.match(container_id):
# Short hex prefix: match by ID prefix before trying name matching (strict bypasses this)
if not strict and _DOCKER_SHORT_ID_PATTERN.match(container_id):
id_lower = container_id.lower()
matches: list[dict[str, Any]] = []
for c in containers:

View File

@@ -21,6 +21,7 @@ from ..config.settings import (
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.utils import safe_display_url
from ..subscriptions.utils import _analyze_subscription_status
ALL_ACTIONS = {"check", "test_connection", "diagnose"}
@@ -218,42 +219,6 @@ async def _comprehensive_check() -> dict[str, Any]:
}
def _analyze_subscription_status(
status: dict[str, Any],
) -> tuple[int, list[dict[str, Any]]]:
"""Analyze subscription status dict, returning error count and connection issues.
This is the canonical implementation of subscription status analysis.
TODO: subscriptions/diagnostics.py has a similar status-analysis pattern
in diagnose_subscriptions(). That module could import and call this helper
directly to avoid divergence. See Code-H05.
Args:
status: Dict of subscription name -> status info from get_subscription_status().
Returns:
Tuple of (error_count, connection_issues_list).
"""
error_count = 0
connection_issues: list[dict[str, Any]] = []
for sub_name, sub_status in status.items():
runtime = sub_status.get("runtime", {})
conn_state = runtime.get("connection_state", "unknown")
if conn_state in ("error", "auth_failed", "timeout", "max_retries_exceeded"):
error_count += 1
if runtime.get("last_error"):
connection_issues.append(
{
"subscription": sub_name,
"state": conn_state,
"error": runtime["last_error"],
}
)
return error_count, connection_issues
async def _diagnose_subscriptions() -> dict[str, Any]:
"""Import and run subscription diagnostics."""
try:

View File

@@ -114,10 +114,14 @@ def register_keys_tool(mcp: FastMCP) -> None:
if permissions is not None:
input_data["permissions"] = permissions
data = await make_graphql_request(MUTATIONS["create"], {"input": input_data})
return {
"success": True,
"key": (data.get("apiKey") or {}).get("create", {}),
}
created_key = (data.get("apiKey") or {}).get("create")
if not created_key:
return {
"success": False,
"key": {},
"message": "API key creation failed: no data returned from server",
}
return {"success": True, "key": created_key}
if action == "update":
if not key_id:
@@ -128,10 +132,14 @@ def register_keys_tool(mcp: FastMCP) -> None:
if roles is not None:
input_data["roles"] = roles
data = await make_graphql_request(MUTATIONS["update"], {"input": input_data})
return {
"success": True,
"key": (data.get("apiKey") or {}).get("update", {}),
}
updated_key = (data.get("apiKey") or {}).get("update")
if not updated_key:
return {
"success": False,
"key": {},
"message": "API key update failed: no data returned from server",
}
return {"success": True, "key": updated_key}
if action == "delete":
if not key_id:

View File

@@ -50,34 +50,80 @@ MUTATIONS: dict[str, str] = {
""",
"archive": """
mutation ArchiveNotification($id: PrefixedID!) {
archiveNotification(id: $id)
archiveNotification(id: $id) { id title importance }
}
""",
"unread": """
mutation UnreadNotification($id: PrefixedID!) {
unreadNotification(id: $id)
unreadNotification(id: $id) { id title importance }
}
""",
"delete": """
mutation DeleteNotification($id: PrefixedID!, $type: NotificationType!) {
deleteNotification(id: $id, type: $type)
deleteNotification(id: $id, type: $type) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"delete_archived": """
mutation DeleteArchivedNotifications {
deleteArchivedNotifications
deleteArchivedNotifications {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"archive_all": """
mutation ArchiveAllNotifications($importance: NotificationImportance) {
archiveAll(importance: $importance)
archiveAll(importance: $importance) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"archive_many": """
mutation ArchiveNotifications($ids: [PrefixedID!]!) {
archiveNotifications(ids: $ids) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"create_unique": """
mutation NotifyIfUnique($input: NotificationData!) {
notifyIfUnique(input: $input) { id title importance }
}
""",
"unarchive_many": """
mutation UnarchiveNotifications($ids: [PrefixedID!]!) {
unarchiveNotifications(ids: $ids) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"unarchive_all": """
mutation UnarchiveAll($importance: NotificationImportance) {
unarchiveAll(importance: $importance) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"recalculate": """
mutation RecalculateOverview {
recalculateOverview {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
}
DESTRUCTIVE_ACTIONS = {"delete", "delete_archived"}
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
_VALID_IMPORTANCE = {"ALERT", "WARNING", "NORMAL"}
_VALID_IMPORTANCE = {"ALERT", "WARNING", "INFO"}
NOTIFICATION_ACTIONS = Literal[
"overview",
@@ -89,6 +135,11 @@ NOTIFICATION_ACTIONS = Literal[
"delete",
"delete_archived",
"archive_all",
"archive_many",
"create_unique",
"unarchive_many",
"unarchive_all",
"recalculate",
]
if set(get_args(NOTIFICATION_ACTIONS)) != ALL_ACTIONS:
@@ -108,6 +159,7 @@ def register_notifications_tool(mcp: FastMCP) -> None:
action: NOTIFICATION_ACTIONS,
confirm: bool = False,
notification_id: str | None = None,
notification_ids: list[str] | None = None,
notification_type: str | None = None,
importance: str | None = None,
offset: int = 0,
@@ -129,6 +181,11 @@ def register_notifications_tool(mcp: FastMCP) -> None:
delete - Delete a notification (requires notification_id, notification_type, confirm=True)
delete_archived - Delete all archived notifications (requires confirm=True)
archive_all - Archive all notifications (optional importance filter)
archive_many - Archive multiple notifications by ID (requires notification_ids)
create_unique - Create notification only if no equivalent unread exists (requires title, subject, description, importance)
unarchive_many - Move notifications back to unread (requires notification_ids)
unarchive_all - Move all archived notifications to unread (optional importance filter)
recalculate - Recompute overview counts from disk
"""
if action not in ALL_ACTIONS:
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
@@ -212,6 +269,55 @@ def register_notifications_tool(mcp: FastMCP) -> None:
data = await make_graphql_request(MUTATIONS["archive_all"], variables)
return {"success": True, "action": "archive_all", "data": data}
if action == "archive_many":
if not notification_ids:
raise ToolError("notification_ids is required for 'archive_many' action")
data = await make_graphql_request(
MUTATIONS["archive_many"], {"ids": notification_ids}
)
return {"success": True, "action": "archive_many", "data": data}
if action == "create_unique":
if title is None or subject is None or description is None or importance is None:
raise ToolError(
"create_unique requires title, subject, description, and importance"
)
if importance.upper() not in _VALID_IMPORTANCE:
raise ToolError(
f"importance must be one of: {', '.join(sorted(_VALID_IMPORTANCE))}. "
f"Got: '{importance}'"
)
input_data = {
"title": title,
"subject": subject,
"description": description,
"importance": importance.upper(),
}
data = await make_graphql_request(MUTATIONS["create_unique"], {"input": input_data})
notification = data.get("notifyIfUnique")
if notification is None:
return {"success": True, "duplicate": True, "data": None}
return {"success": True, "duplicate": False, "data": notification}
if action == "unarchive_many":
if not notification_ids:
raise ToolError("notification_ids is required for 'unarchive_many' action")
data = await make_graphql_request(
MUTATIONS["unarchive_many"], {"ids": notification_ids}
)
return {"success": True, "action": "unarchive_many", "data": data}
if action == "unarchive_all":
vars_: dict[str, Any] | None = None
if importance:
vars_ = {"importance": importance.upper()}
data = await make_graphql_request(MUTATIONS["unarchive_all"], vars_)
return {"success": True, "action": "unarchive_all", "data": data}
if action == "recalculate":
data = await make_graphql_request(MUTATIONS["recalculate"])
return {"success": True, "action": "recalculate", "data": data}
raise ToolError(f"Unhandled action '{action}' — this is a bug")
logger.info("Notifications tool registered successfully")

View File

@@ -114,56 +114,42 @@ def register_vm_tool(mcp: FastMCP) -> None:
raise ToolError(f"Action '{action}' is destructive. Set confirm=True to proceed.")
with tool_error_handler("vm", action, logger):
try:
logger.info(f"Executing unraid_vm action={action}")
logger.info(f"Executing unraid_vm action={action}")
if action == "list":
data = await make_graphql_request(QUERIES["list"])
if data.get("vms"):
vms = data["vms"].get("domains") or data["vms"].get("domain") or []
if isinstance(vms, dict):
vms = [vms]
return {"vms": vms}
return {"vms": []}
if action == "details":
data = await make_graphql_request(QUERIES["details"])
if not data.get("vms"):
raise ToolError("No VM data returned from server")
if action == "list":
data = await make_graphql_request(QUERIES["list"])
if data.get("vms"):
vms = data["vms"].get("domains") or data["vms"].get("domain") or []
if isinstance(vms, dict):
vms = [vms]
for vm in vms:
if (
vm.get("uuid") == vm_id
or vm.get("id") == vm_id
or vm.get("name") == vm_id
):
return dict(vm)
available = [f"{v.get('name')} (UUID: {v.get('uuid')})" for v in vms]
raise ToolError(f"VM '{vm_id}' not found. Available: {', '.join(available)}")
return {"vms": vms}
return {"vms": []}
# Mutations
if action in MUTATIONS:
data = await make_graphql_request(MUTATIONS[action], {"id": vm_id})
field = _MUTATION_FIELDS.get(action, action)
if data.get("vm") and field in data["vm"]:
return {
"success": data["vm"][field],
"action": action,
"vm_id": vm_id,
}
raise ToolError(f"Failed to {action} VM or unexpected response")
if action == "details":
data = await make_graphql_request(QUERIES["details"])
if not data.get("vms"):
raise ToolError("No VM data returned from server")
vms = data["vms"].get("domains") or data["vms"].get("domain") or []
if isinstance(vms, dict):
vms = [vms]
for vm in vms:
if vm.get("uuid") == vm_id or vm.get("id") == vm_id or vm.get("name") == vm_id:
return dict(vm)
available = [f"{v.get('name')} (UUID: {v.get('uuid')})" for v in vms]
raise ToolError(f"VM '{vm_id}' not found. Available: {', '.join(available)}")
raise ToolError(f"Unhandled action '{action}' — this is a bug")
# Mutations
if action in MUTATIONS:
data = await make_graphql_request(MUTATIONS[action], {"id": vm_id})
field = _MUTATION_FIELDS.get(action, action)
if data.get("vm") and field in data["vm"]:
return {
"success": data["vm"][field],
"action": action,
"vm_id": vm_id,
}
raise ToolError(f"Failed to {action} VM or unexpected response")
except ToolError:
raise
except Exception as e:
if "VMs are not available" in str(e):
raise ToolError(
"VMs not available on this server. Check VM support is enabled."
) from e
raise
raise ToolError(f"Unhandled action '{action}' — this is a bug")
logger.info("VM tool registered successfully")