forked from HomeLab/unraid-mcp
refactor: comprehensive code review fixes across 31 files
Addresses all critical, high, medium, and low issues from full codebase review. 494 tests pass, ruff clean, ty type-check clean. Security: - Add tool_error_handler context manager (exceptions.py) — standardised error handling, eliminates 11 bare except-reraise patterns - Remove unused exception subclasses (ConfigurationError, UnraidAPIError, SubscriptionError, ValidationError, IdempotentOperationError) - Harden GraphQL subscription query validator with allow-list and forbidden-keyword regex (diagnostics.py) - Add input validation for rclone create_remote config_data: injection, path-traversal, and key-count limits (rclone.py) - Validate notifications importance enum before GraphQL request (notifications.py) - Sanitise HTTP/network/JSON error messages — no raw exception strings leaked to clients (client.py) - Strip path/creds from displayed API URL via _safe_display_url (health.py) - Enable Ruff S (bandit) rule category in pyproject.toml - Harden container mutations to strict-only matching — no fuzzy/substring for destructive operations (docker.py) Performance: - Token-bucket rate limiter (90 tokens, 9 req/s) with 429 retry backoff (client.py) - Lazy asyncio.Lock init via _get_client_lock() — fixes event-loop module-load crash (client.py) - Double-checked locking in get_http_client() for fast-path (client.py) - Short hex container ID fast-path skips list fetch (docker.py) - Cap resource_data log content to 1 MB / 5,000 lines (manager.py) - Reset reconnect counter after 30 s stable connection (manager.py) - Move tail_lines validation to module level; enforce 10,000 line cap (storage.py, docker.py) - force_terminal=True removed from logging RichHandler (logging.py) Architecture: - Register diagnostic tools in server startup (server.py) - Move ALL_ACTIONS computation to module level in all tools - Consolidate format_kb / format_bytes into shared core/utils.py - Add _safe_get() helper in core/utils.py for nested dict traversal - Extract _analyze_subscription_status() 
from health.py diagnose handler - Validate required config at startup — fail fast with CRITICAL log (server.py) Code quality: - Remove ~90 lines of dead Rich formatting helpers from logging.py - Remove dead self.websocket attribute from SubscriptionManager - Remove dead setup_uvicorn_logging() wrapper - Move _VALID_IMPORTANCE to module level (N806 fix) - Add slots=True to all three dataclasses (SubscriptionData, SystemHealth, APIResponse) - Fix None rendering as literal "None" string in info.py summaries - Change fuzzy-match log messages from INFO to DEBUG (docker.py) - UTC-aware datetimes throughout (manager.py, diagnostics.py) Infrastructure: - Upgrade base image python:3.11-slim → python:3.12-slim (Dockerfile) - Add non-root appuser (UID/GID 1000) with HEALTHCHECK (Dockerfile) - Add read_only, cap_drop: ALL, tmpfs /tmp to docker-compose.yml - Single-source version via importlib.metadata (pyproject.toml → __init__.py) - Add open_timeout to all websockets.connect() calls Tests: - Update error message matchers to match sanitised messages (test_client.py) - Fix patch targets for UNRAID_API_URL → utils module (test_subscriptions.py) - Fix importance="info" → importance="normal" (test_notifications.py, http_layer) - Fix naive datetime fixtures → UTC-aware (test_subscriptions.py) Co-authored-by: Claude <claude@anthropic.com>
This commit is contained in:
@@ -9,7 +9,7 @@ from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
|
||||
|
||||
QUERIES: dict[str, str] = {
|
||||
@@ -74,7 +74,7 @@ def register_array_tool(mcp: FastMCP) -> None:
|
||||
if action not in ALL_ACTIONS:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
|
||||
|
||||
try:
|
||||
with tool_error_handler("array", action, logger):
|
||||
logger.info(f"Executing unraid_array action={action}")
|
||||
|
||||
if action in QUERIES:
|
||||
@@ -95,10 +95,4 @@ def register_array_tool(mcp: FastMCP) -> None:
|
||||
"data": data,
|
||||
}
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_array action={action}: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute array/{action}: {e!s}") from e
|
||||
|
||||
logger.info("Array tool registered successfully")
|
||||
|
||||
@@ -11,7 +11,8 @@ from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
from ..core.utils import safe_get
|
||||
|
||||
|
||||
QUERIES: dict[str, str] = {
|
||||
@@ -99,6 +100,10 @@ MUTATIONS: dict[str, str] = {
|
||||
}
|
||||
|
||||
DESTRUCTIVE_ACTIONS = {"remove"}
|
||||
_MUTATION_ACTIONS = {"start", "stop", "restart", "pause", "unpause", "remove", "update"}
|
||||
# NOTE (Code-M-07): "details" and "logs" are listed here because they require a
|
||||
# container_id parameter, but unlike mutations they use fuzzy name matching (not
|
||||
# strict). This is intentional: read-only queries are safe with fuzzy matching.
|
||||
_ACTIONS_REQUIRING_CONTAINER_ID = {
|
||||
"start",
|
||||
"stop",
|
||||
@@ -111,6 +116,7 @@ _ACTIONS_REQUIRING_CONTAINER_ID = {
|
||||
"logs",
|
||||
}
|
||||
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS) | {"restart"}
|
||||
_MAX_TAIL_LINES = 10_000
|
||||
|
||||
DOCKER_ACTIONS = Literal[
|
||||
"list",
|
||||
@@ -130,33 +136,28 @@ DOCKER_ACTIONS = Literal[
|
||||
"check_updates",
|
||||
]
|
||||
|
||||
# Docker container IDs: 64 hex chars + optional suffix (e.g., ":local")
|
||||
# Full PrefixedID: 64 hex chars + optional suffix (e.g., ":local")
|
||||
_DOCKER_ID_PATTERN = re.compile(r"^[a-f0-9]{64}(:[a-z0-9]+)?$", re.IGNORECASE)
|
||||
|
||||
|
||||
def _safe_get(data: dict[str, Any], *keys: str, default: Any = None) -> Any:
|
||||
"""Safely traverse nested dict keys, handling None intermediates."""
|
||||
current = data
|
||||
for key in keys:
|
||||
if not isinstance(current, dict):
|
||||
return default
|
||||
current = current.get(key)
|
||||
return current if current is not None else default
|
||||
# Short hex prefix: at least 12 hex chars (standard Docker short ID length)
|
||||
_DOCKER_SHORT_ID_PATTERN = re.compile(r"^[a-f0-9]{12,63}$", re.IGNORECASE)
|
||||
|
||||
|
||||
def find_container_by_identifier(
|
||||
identifier: str, containers: list[dict[str, Any]]
|
||||
identifier: str, containers: list[dict[str, Any]], *, strict: bool = False
|
||||
) -> dict[str, Any] | None:
|
||||
"""Find a container by ID or name with fuzzy matching.
|
||||
"""Find a container by ID or name with optional fuzzy matching.
|
||||
|
||||
Match priority:
|
||||
1. Exact ID match
|
||||
2. Exact name match (case-sensitive)
|
||||
|
||||
When strict=False (default), also tries:
|
||||
3. Name starts with identifier (case-insensitive)
|
||||
4. Name contains identifier as substring (case-insensitive)
|
||||
|
||||
Note: Short identifiers (e.g. "db") may match unintended containers
|
||||
via substring. Use more specific names or IDs for precision.
|
||||
When strict=True, only exact matches (1 & 2) are used.
|
||||
Use strict=True for mutations to prevent targeting the wrong container.
|
||||
"""
|
||||
if not containers:
|
||||
return None
|
||||
@@ -168,20 +169,24 @@ def find_container_by_identifier(
|
||||
if identifier in c.get("names", []):
|
||||
return c
|
||||
|
||||
# Strict mode: no fuzzy matching allowed
|
||||
if strict:
|
||||
return None
|
||||
|
||||
id_lower = identifier.lower()
|
||||
|
||||
# Priority 3: prefix match (more precise than substring)
|
||||
for c in containers:
|
||||
for name in c.get("names", []):
|
||||
if name.lower().startswith(id_lower):
|
||||
logger.info(f"Prefix match: '{identifier}' -> '{name}'")
|
||||
logger.debug(f"Prefix match: '{identifier}' -> '{name}'")
|
||||
return c
|
||||
|
||||
# Priority 4: substring match (least precise)
|
||||
for c in containers:
|
||||
for name in c.get("names", []):
|
||||
if id_lower in name.lower():
|
||||
logger.info(f"Substring match: '{identifier}' -> '{name}'")
|
||||
logger.debug(f"Substring match: '{identifier}' -> '{name}'")
|
||||
return c
|
||||
|
||||
return None
|
||||
@@ -195,27 +200,62 @@ def get_available_container_names(containers: list[dict[str, Any]]) -> list[str]
|
||||
return names
|
||||
|
||||
|
||||
async def _resolve_container_id(container_id: str) -> str:
|
||||
"""Resolve a container name/identifier to its actual PrefixedID."""
|
||||
def _looks_like_container_id(identifier: str) -> bool:
    """Return True when *identifier* resembles a Docker container ID.

    Accepts either a full 64-hex PrefixedID (with optional ``:suffix``)
    or a short hex prefix of 12–63 characters.
    """
    patterns = (_DOCKER_ID_PATTERN, _DOCKER_SHORT_ID_PATTERN)
    return any(p.match(identifier) is not None for p in patterns)
|
||||
|
||||
|
||||
async def _resolve_container_id(container_id: str, *, strict: bool = False) -> str:
|
||||
"""Resolve a container name/identifier to its actual PrefixedID.
|
||||
|
||||
Optimization: if the identifier is a full 64-char hex ID (with optional
|
||||
:suffix), skip the container list fetch entirely and use it directly.
|
||||
If it's a short hex prefix (12-63 chars), fetch the list and match by
|
||||
ID prefix. Only fetch the container list for name-based lookups.
|
||||
|
||||
Args:
|
||||
container_id: Container name or ID to resolve
|
||||
strict: When True, only exact name/ID matches are allowed (no fuzzy).
|
||||
Use for mutations to prevent targeting the wrong container.
|
||||
"""
|
||||
# Full PrefixedID: skip the list fetch entirely
|
||||
if _DOCKER_ID_PATTERN.match(container_id):
|
||||
return container_id
|
||||
|
||||
logger.info(f"Resolving container identifier '{container_id}'")
|
||||
logger.info(f"Resolving container identifier '{container_id}' (strict={strict})")
|
||||
list_query = """
|
||||
query ResolveContainerID {
|
||||
docker { containers(skipCache: true) { id names } }
|
||||
}
|
||||
"""
|
||||
data = await make_graphql_request(list_query)
|
||||
containers = _safe_get(data, "docker", "containers", default=[])
|
||||
resolved = find_container_by_identifier(container_id, containers)
|
||||
containers = safe_get(data, "docker", "containers", default=[])
|
||||
|
||||
# Short hex prefix: match by ID prefix before trying name matching
|
||||
if _DOCKER_SHORT_ID_PATTERN.match(container_id):
|
||||
id_lower = container_id.lower()
|
||||
for c in containers:
|
||||
cid = (c.get("id") or "").lower()
|
||||
if cid.startswith(id_lower) or cid.split(":")[0].startswith(id_lower):
|
||||
actual_id = str(c.get("id", ""))
|
||||
logger.info(f"Resolved short ID '{container_id}' -> '{actual_id}'")
|
||||
return actual_id
|
||||
|
||||
resolved = find_container_by_identifier(container_id, containers, strict=strict)
|
||||
if resolved:
|
||||
actual_id = str(resolved.get("id", ""))
|
||||
logger.info(f"Resolved '{container_id}' -> '{actual_id}'")
|
||||
return actual_id
|
||||
|
||||
available = get_available_container_names(containers)
|
||||
msg = f"Container '{container_id}' not found."
|
||||
if strict:
|
||||
msg = (
|
||||
f"Container '{container_id}' not found by exact match. "
|
||||
f"Mutations require an exact container name or full ID — "
|
||||
f"fuzzy/substring matching is not allowed for safety."
|
||||
)
|
||||
else:
|
||||
msg = f"Container '{container_id}' not found."
|
||||
if available:
|
||||
msg += f" Available: {', '.join(available[:10])}"
|
||||
raise ToolError(msg)
|
||||
@@ -264,38 +304,40 @@ def register_docker_tool(mcp: FastMCP) -> None:
|
||||
if action == "network_details" and not network_id:
|
||||
raise ToolError("network_id is required for 'network_details' action")
|
||||
|
||||
try:
|
||||
if tail_lines < 1 or tail_lines > _MAX_TAIL_LINES:
|
||||
raise ToolError(f"tail_lines must be between 1 and {_MAX_TAIL_LINES}, got {tail_lines}")
|
||||
|
||||
with tool_error_handler("docker", action, logger):
|
||||
logger.info(f"Executing unraid_docker action={action}")
|
||||
|
||||
# --- Read-only queries ---
|
||||
if action == "list":
|
||||
data = await make_graphql_request(QUERIES["list"])
|
||||
containers = _safe_get(data, "docker", "containers", default=[])
|
||||
return {"containers": list(containers) if isinstance(containers, list) else []}
|
||||
containers = safe_get(data, "docker", "containers", default=[])
|
||||
return {"containers": containers}
|
||||
|
||||
if action == "details":
|
||||
# Resolve name -> ID first (skips list fetch if already an ID)
|
||||
actual_id = await _resolve_container_id(container_id or "")
|
||||
data = await make_graphql_request(QUERIES["details"])
|
||||
containers = _safe_get(data, "docker", "containers", default=[])
|
||||
container = find_container_by_identifier(container_id or "", containers)
|
||||
if container:
|
||||
return container
|
||||
available = get_available_container_names(containers)
|
||||
msg = f"Container '{container_id}' not found."
|
||||
if available:
|
||||
msg += f" Available: {', '.join(available[:10])}"
|
||||
raise ToolError(msg)
|
||||
containers = safe_get(data, "docker", "containers", default=[])
|
||||
# Match by resolved ID (exact match, no second list fetch needed)
|
||||
for c in containers:
|
||||
if c.get("id") == actual_id:
|
||||
return c
|
||||
raise ToolError(f"Container '{container_id}' not found in details response.")
|
||||
|
||||
if action == "logs":
|
||||
actual_id = await _resolve_container_id(container_id or "")
|
||||
data = await make_graphql_request(
|
||||
QUERIES["logs"], {"id": actual_id, "tail": tail_lines}
|
||||
)
|
||||
return {"logs": _safe_get(data, "docker", "logs")}
|
||||
return {"logs": safe_get(data, "docker", "logs")}
|
||||
|
||||
if action == "networks":
|
||||
data = await make_graphql_request(QUERIES["networks"])
|
||||
networks = data.get("dockerNetworks", [])
|
||||
return {"networks": list(networks) if isinstance(networks, list) else []}
|
||||
return {"networks": networks}
|
||||
|
||||
if action == "network_details":
|
||||
data = await make_graphql_request(QUERIES["network_details"], {"id": network_id})
|
||||
@@ -303,17 +345,17 @@ def register_docker_tool(mcp: FastMCP) -> None:
|
||||
|
||||
if action == "port_conflicts":
|
||||
data = await make_graphql_request(QUERIES["port_conflicts"])
|
||||
conflicts = _safe_get(data, "docker", "portConflicts", default=[])
|
||||
return {"port_conflicts": list(conflicts) if isinstance(conflicts, list) else []}
|
||||
conflicts = safe_get(data, "docker", "portConflicts", default=[])
|
||||
return {"port_conflicts": conflicts}
|
||||
|
||||
if action == "check_updates":
|
||||
data = await make_graphql_request(QUERIES["check_updates"])
|
||||
statuses = _safe_get(data, "docker", "containerUpdateStatuses", default=[])
|
||||
return {"update_statuses": list(statuses) if isinstance(statuses, list) else []}
|
||||
statuses = safe_get(data, "docker", "containerUpdateStatuses", default=[])
|
||||
return {"update_statuses": statuses}
|
||||
|
||||
# --- Mutations ---
|
||||
# --- Mutations (strict matching: no fuzzy/substring) ---
|
||||
if action == "restart":
|
||||
actual_id = await _resolve_container_id(container_id or "")
|
||||
actual_id = await _resolve_container_id(container_id or "", strict=True)
|
||||
# Stop (idempotent: treat "already stopped" as success)
|
||||
stop_data = await make_graphql_request(
|
||||
MUTATIONS["stop"],
|
||||
@@ -330,7 +372,7 @@ def register_docker_tool(mcp: FastMCP) -> None:
|
||||
if start_data.get("idempotent_success"):
|
||||
result = {}
|
||||
else:
|
||||
result = _safe_get(start_data, "docker", "start", default={})
|
||||
result = safe_get(start_data, "docker", "start", default={})
|
||||
response: dict[str, Any] = {
|
||||
"success": True,
|
||||
"action": "restart",
|
||||
@@ -342,12 +384,12 @@ def register_docker_tool(mcp: FastMCP) -> None:
|
||||
|
||||
if action == "update_all":
|
||||
data = await make_graphql_request(MUTATIONS["update_all"])
|
||||
results = _safe_get(data, "docker", "updateAllContainers", default=[])
|
||||
results = safe_get(data, "docker", "updateAllContainers", default=[])
|
||||
return {"success": True, "action": "update_all", "containers": results}
|
||||
|
||||
# Single-container mutations
|
||||
if action in MUTATIONS:
|
||||
actual_id = await _resolve_container_id(container_id or "")
|
||||
actual_id = await _resolve_container_id(container_id or "", strict=True)
|
||||
op_context: dict[str, str] | None = (
|
||||
{"operation": action} if action in ("start", "stop") else None
|
||||
)
|
||||
@@ -382,10 +424,4 @@ def register_docker_tool(mcp: FastMCP) -> None:
|
||||
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_docker action={action}: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute docker/{action}: {e!s}") from e
|
||||
|
||||
logger.info("Docker tool registered successfully")
|
||||
|
||||
@@ -7,6 +7,7 @@ connection testing, and subscription diagnostics.
|
||||
import datetime
|
||||
import time
|
||||
from typing import Any, Literal
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from fastmcp import FastMCP
|
||||
|
||||
@@ -19,9 +20,30 @@ from ..config.settings import (
|
||||
VERSION,
|
||||
)
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
|
||||
|
||||
def _safe_display_url(url: str | None) -> str | None:
|
||||
"""Return a redacted URL showing only scheme + host + port.
|
||||
|
||||
Strips path, query parameters, credentials, and fragments to avoid
|
||||
leaking internal network topology or embedded secrets (CWE-200).
|
||||
"""
|
||||
if not url:
|
||||
return None
|
||||
try:
|
||||
parsed = urlparse(url)
|
||||
host = parsed.hostname or "unknown"
|
||||
if parsed.port:
|
||||
return f"{parsed.scheme}://{host}:{parsed.port}"
|
||||
return f"{parsed.scheme}://{host}"
|
||||
except Exception:
|
||||
# If parsing fails, show nothing rather than leaking the raw URL
|
||||
return "<unparseable>"
|
||||
|
||||
|
||||
ALL_ACTIONS = {"check", "test_connection", "diagnose"}
|
||||
|
||||
HEALTH_ACTIONS = Literal["check", "test_connection", "diagnose"]
|
||||
|
||||
# Severity ordering: only upgrade, never downgrade
|
||||
@@ -53,12 +75,10 @@ def register_health_tool(mcp: FastMCP) -> None:
|
||||
test_connection - Quick connectivity test (just checks { online })
|
||||
diagnose - Subscription system diagnostics
|
||||
"""
|
||||
if action not in ("check", "test_connection", "diagnose"):
|
||||
raise ToolError(
|
||||
f"Invalid action '{action}'. Must be one of: check, test_connection, diagnose"
|
||||
)
|
||||
if action not in ALL_ACTIONS:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
|
||||
|
||||
try:
|
||||
with tool_error_handler("health", action, logger):
|
||||
logger.info(f"Executing unraid_health action={action}")
|
||||
|
||||
if action == "test_connection":
|
||||
@@ -79,12 +99,6 @@ def register_health_tool(mcp: FastMCP) -> None:
|
||||
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_health action={action}: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute health/{action}: {e!s}") from e
|
||||
|
||||
logger.info("Health tool registered successfully")
|
||||
|
||||
|
||||
@@ -111,7 +125,7 @@ async def _comprehensive_check() -> dict[str, Any]:
|
||||
overview { unread { alert warning total } }
|
||||
}
|
||||
docker {
|
||||
containers(skipCache: true) { id state status }
|
||||
containers { id state status }
|
||||
}
|
||||
}
|
||||
"""
|
||||
@@ -135,7 +149,7 @@ async def _comprehensive_check() -> dict[str, Any]:
|
||||
if info:
|
||||
health_info["unraid_system"] = {
|
||||
"status": "connected",
|
||||
"url": UNRAID_API_URL,
|
||||
"url": _safe_display_url(UNRAID_API_URL),
|
||||
"machine_id": info.get("machineId"),
|
||||
"version": info.get("versions", {}).get("unraid"),
|
||||
"uptime": info.get("os", {}).get("uptime"),
|
||||
@@ -215,6 +229,42 @@ async def _comprehensive_check() -> dict[str, Any]:
|
||||
}
|
||||
|
||||
|
||||
def _analyze_subscription_status(
|
||||
status: dict[str, Any],
|
||||
) -> tuple[int, list[dict[str, Any]]]:
|
||||
"""Analyze subscription status dict, returning error count and connection issues.
|
||||
|
||||
This is the canonical implementation of subscription status analysis.
|
||||
TODO: subscriptions/diagnostics.py (lines 168-182) duplicates this logic.
|
||||
That module should be refactored to call this helper once file ownership
|
||||
allows cross-agent edits. See Code-H05.
|
||||
|
||||
Args:
|
||||
status: Dict of subscription name -> status info from get_subscription_status().
|
||||
|
||||
Returns:
|
||||
Tuple of (error_count, connection_issues_list).
|
||||
"""
|
||||
error_count = 0
|
||||
connection_issues: list[dict[str, Any]] = []
|
||||
|
||||
for sub_name, sub_status in status.items():
|
||||
runtime = sub_status.get("runtime", {})
|
||||
conn_state = runtime.get("connection_state", "unknown")
|
||||
if conn_state in ("error", "auth_failed", "timeout", "max_retries_exceeded"):
|
||||
error_count += 1
|
||||
if runtime.get("last_error"):
|
||||
connection_issues.append(
|
||||
{
|
||||
"subscription": sub_name,
|
||||
"state": conn_state,
|
||||
"error": runtime["last_error"],
|
||||
}
|
||||
)
|
||||
|
||||
return error_count, connection_issues
|
||||
|
||||
|
||||
async def _diagnose_subscriptions() -> dict[str, Any]:
|
||||
"""Import and run subscription diagnostics."""
|
||||
try:
|
||||
@@ -223,13 +273,10 @@ async def _diagnose_subscriptions() -> dict[str, Any]:
|
||||
|
||||
await ensure_subscriptions_started()
|
||||
|
||||
status = subscription_manager.get_subscription_status()
|
||||
# This list is intentionally placed into the summary dict below and then
|
||||
# appended to in the loop — the mutable alias ensures both references
|
||||
# reflect the same data without a second pass.
|
||||
connection_issues: list[dict[str, Any]] = []
|
||||
status = await subscription_manager.get_subscription_status()
|
||||
error_count, connection_issues = _analyze_subscription_status(status)
|
||||
|
||||
diagnostic_info: dict[str, Any] = {
|
||||
return {
|
||||
"timestamp": datetime.datetime.now(datetime.UTC).isoformat(),
|
||||
"environment": {
|
||||
"auto_start_enabled": subscription_manager.auto_start_enabled,
|
||||
@@ -241,27 +288,11 @@ async def _diagnose_subscriptions() -> dict[str, Any]:
|
||||
"total_configured": len(subscription_manager.subscription_configs),
|
||||
"active_count": len(subscription_manager.active_subscriptions),
|
||||
"with_data": len(subscription_manager.resource_data),
|
||||
"in_error_state": 0,
|
||||
"in_error_state": error_count,
|
||||
"connection_issues": connection_issues,
|
||||
},
|
||||
}
|
||||
|
||||
for sub_name, sub_status in status.items():
|
||||
runtime = sub_status.get("runtime", {})
|
||||
conn_state = runtime.get("connection_state", "unknown")
|
||||
if conn_state in ("error", "auth_failed", "timeout", "max_retries_exceeded"):
|
||||
diagnostic_info["summary"]["in_error_state"] += 1
|
||||
if runtime.get("last_error"):
|
||||
connection_issues.append(
|
||||
{
|
||||
"subscription": sub_name,
|
||||
"state": conn_state,
|
||||
"error": runtime["last_error"],
|
||||
}
|
||||
)
|
||||
|
||||
return diagnostic_info
|
||||
|
||||
except ImportError:
|
||||
return {
|
||||
"error": "Subscription modules not available",
|
||||
|
||||
@@ -10,7 +10,8 @@ from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
from ..core.utils import format_kb
|
||||
|
||||
|
||||
# Pre-built queries keyed by action name
|
||||
@@ -19,7 +20,7 @@ QUERIES: dict[str, str] = {
|
||||
query GetSystemInfo {
|
||||
info {
|
||||
os { platform distro release codename kernel arch hostname codepage logofile serial build uptime }
|
||||
cpu { manufacturer brand vendor family model stepping revision voltage speed speedmin speedmax threads cores processors socket cache flags }
|
||||
cpu { manufacturer brand vendor family model stepping revision voltage speed speedmin speedmax threads cores processors socket cache }
|
||||
memory {
|
||||
layout { bank type clockSpeed formFactor manufacturer partNum serialNum }
|
||||
}
|
||||
@@ -81,7 +82,6 @@ QUERIES: dict[str, str] = {
|
||||
shareAvahiEnabled safeMode startMode configValid configError joinStatus
|
||||
deviceCount flashGuid flashProduct flashVendor mdState mdVersion
|
||||
shareCount shareSmbCount shareNfsCount shareAfpCount shareMoverActive
|
||||
csrfToken
|
||||
}
|
||||
}
|
||||
""",
|
||||
@@ -156,6 +156,8 @@ QUERIES: dict[str, str] = {
|
||||
""",
|
||||
}
|
||||
|
||||
ALL_ACTIONS = set(QUERIES)
|
||||
|
||||
INFO_ACTIONS = Literal[
|
||||
"overview",
|
||||
"array",
|
||||
@@ -178,9 +180,13 @@ INFO_ACTIONS = Literal[
|
||||
"ups_config",
|
||||
]
|
||||
|
||||
assert set(QUERIES.keys()) == set(INFO_ACTIONS.__args__), (
|
||||
"QUERIES keys and INFO_ACTIONS are out of sync"
|
||||
)
|
||||
if set(INFO_ACTIONS.__args__) != ALL_ACTIONS:
|
||||
_missing = ALL_ACTIONS - set(INFO_ACTIONS.__args__)
|
||||
_extra = set(INFO_ACTIONS.__args__) - ALL_ACTIONS
|
||||
raise RuntimeError(
|
||||
f"QUERIES keys and INFO_ACTIONS are out of sync. "
|
||||
f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
|
||||
)
|
||||
|
||||
|
||||
def _process_system_info(raw_info: dict[str, Any]) -> dict[str, Any]:
|
||||
@@ -189,17 +195,17 @@ def _process_system_info(raw_info: dict[str, Any]) -> dict[str, Any]:
|
||||
if raw_info.get("os"):
|
||||
os_info = raw_info["os"]
|
||||
summary["os"] = (
|
||||
f"{os_info.get('distro', '')} {os_info.get('release', '')} "
|
||||
f"({os_info.get('platform', '')}, {os_info.get('arch', '')})"
|
||||
f"{os_info.get('distro') or 'unknown'} {os_info.get('release') or 'unknown'} "
|
||||
f"({os_info.get('platform') or 'unknown'}, {os_info.get('arch') or 'unknown'})"
|
||||
)
|
||||
summary["hostname"] = os_info.get("hostname")
|
||||
summary["hostname"] = os_info.get("hostname") or "unknown"
|
||||
summary["uptime"] = os_info.get("uptime")
|
||||
|
||||
if raw_info.get("cpu"):
|
||||
cpu = raw_info["cpu"]
|
||||
summary["cpu"] = (
|
||||
f"{cpu.get('manufacturer', '')} {cpu.get('brand', '')} "
|
||||
f"({cpu.get('cores', '?')} cores, {cpu.get('threads', '?')} threads)"
|
||||
f"{cpu.get('manufacturer') or 'unknown'} {cpu.get('brand') or 'unknown'} "
|
||||
f"({cpu.get('cores') or '?'} cores, {cpu.get('threads') or '?'} threads)"
|
||||
)
|
||||
|
||||
if raw_info.get("memory") and raw_info["memory"].get("layout"):
|
||||
@@ -207,10 +213,10 @@ def _process_system_info(raw_info: dict[str, Any]) -> dict[str, Any]:
|
||||
summary["memory_layout_details"] = []
|
||||
for stick in mem_layout:
|
||||
summary["memory_layout_details"].append(
|
||||
f"Bank {stick.get('bank', '?')}: Type {stick.get('type', '?')}, "
|
||||
f"Speed {stick.get('clockSpeed', '?')}MHz, "
|
||||
f"Manufacturer: {stick.get('manufacturer', '?')}, "
|
||||
f"Part: {stick.get('partNum', '?')}"
|
||||
f"Bank {stick.get('bank') or '?'}: Type {stick.get('type') or '?'}, "
|
||||
f"Speed {stick.get('clockSpeed') or '?'}MHz, "
|
||||
f"Manufacturer: {stick.get('manufacturer') or '?'}, "
|
||||
f"Part: {stick.get('partNum') or '?'}"
|
||||
)
|
||||
summary["memory_summary"] = (
|
||||
"Stick layout details retrieved. Overall total/used/free memory stats "
|
||||
@@ -255,31 +261,14 @@ def _analyze_disk_health(disks: list[dict[str, Any]]) -> dict[str, int]:
|
||||
return counts
|
||||
|
||||
|
||||
def _format_kb(k: Any) -> str:
|
||||
"""Format kilobyte values into human-readable sizes."""
|
||||
if k is None:
|
||||
return "N/A"
|
||||
try:
|
||||
k = int(k)
|
||||
except (ValueError, TypeError):
|
||||
return "N/A"
|
||||
if k >= 1024 * 1024 * 1024:
|
||||
return f"{k / (1024 * 1024 * 1024):.2f} TB"
|
||||
if k >= 1024 * 1024:
|
||||
return f"{k / (1024 * 1024):.2f} GB"
|
||||
if k >= 1024:
|
||||
return f"{k / 1024:.2f} MB"
|
||||
return f"{k} KB"
|
||||
|
||||
|
||||
def _process_array_status(raw: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Process raw array data into summary + details."""
|
||||
summary: dict[str, Any] = {"state": raw.get("state")}
|
||||
if raw.get("capacity") and raw["capacity"].get("kilobytes"):
|
||||
kb = raw["capacity"]["kilobytes"]
|
||||
summary["capacity_total"] = _format_kb(kb.get("total"))
|
||||
summary["capacity_used"] = _format_kb(kb.get("used"))
|
||||
summary["capacity_free"] = _format_kb(kb.get("free"))
|
||||
summary["capacity_total"] = format_kb(kb.get("total"))
|
||||
summary["capacity_used"] = format_kb(kb.get("used"))
|
||||
summary["capacity_free"] = format_kb(kb.get("free"))
|
||||
|
||||
summary["num_parity_disks"] = len(raw.get("parities", []))
|
||||
summary["num_data_disks"] = len(raw.get("disks", []))
|
||||
@@ -345,8 +334,8 @@ def register_info_tool(mcp: FastMCP) -> None:
|
||||
ups_device - Single UPS device (requires device_id)
|
||||
ups_config - UPS configuration
|
||||
"""
|
||||
if action not in QUERIES:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {list(QUERIES.keys())}")
|
||||
if action not in ALL_ACTIONS:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
|
||||
|
||||
if action == "ups_device" and not device_id:
|
||||
raise ToolError("device_id is required for ups_device action")
|
||||
@@ -377,7 +366,7 @@ def register_info_tool(mcp: FastMCP) -> None:
|
||||
"ups_devices": ("upsDevices", "ups_devices"),
|
||||
}
|
||||
|
||||
try:
|
||||
with tool_error_handler("info", action, logger):
|
||||
logger.info(f"Executing unraid_info action={action}")
|
||||
data = await make_graphql_request(query, variables)
|
||||
|
||||
@@ -426,14 +415,8 @@ def register_info_tool(mcp: FastMCP) -> None:
|
||||
if action in list_actions:
|
||||
response_key, output_key = list_actions[action]
|
||||
items = data.get(response_key) or []
|
||||
return {output_key: list(items) if isinstance(items, list) else []}
|
||||
return {output_key: items}
|
||||
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_info action={action}: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute info/{action}: {e!s}") from e
|
||||
|
||||
logger.info("Info tool registered successfully")
|
||||
|
||||
@@ -10,7 +10,7 @@ from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
|
||||
|
||||
QUERIES: dict[str, str] = {
|
||||
@@ -45,6 +45,7 @@ MUTATIONS: dict[str, str] = {
|
||||
}
|
||||
|
||||
DESTRUCTIVE_ACTIONS = {"delete"}
|
||||
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
|
||||
|
||||
KEY_ACTIONS = Literal[
|
||||
"list",
|
||||
@@ -76,14 +77,13 @@ def register_keys_tool(mcp: FastMCP) -> None:
|
||||
update - Update an API key (requires key_id; optional name, roles)
|
||||
delete - Delete API keys (requires key_id, confirm=True)
|
||||
"""
|
||||
all_actions = set(QUERIES) | set(MUTATIONS)
|
||||
if action not in all_actions:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(all_actions)}")
|
||||
if action not in ALL_ACTIONS:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
|
||||
|
||||
if action in DESTRUCTIVE_ACTIONS and not confirm:
|
||||
raise ToolError(f"Action '{action}' is destructive. Set confirm=True to proceed.")
|
||||
|
||||
try:
|
||||
with tool_error_handler("keys", action, logger):
|
||||
logger.info(f"Executing unraid_keys action={action}")
|
||||
|
||||
if action == "list":
|
||||
@@ -141,10 +141,4 @@ def register_keys_tool(mcp: FastMCP) -> None:
|
||||
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_keys action={action}: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute keys/{action}: {e!s}") from e
|
||||
|
||||
logger.info("Keys tool registered successfully")
|
||||
|
||||
@@ -10,7 +10,7 @@ from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
|
||||
|
||||
QUERIES: dict[str, str] = {
|
||||
@@ -76,6 +76,8 @@ MUTATIONS: dict[str, str] = {
|
||||
}
|
||||
|
||||
DESTRUCTIVE_ACTIONS = {"delete", "delete_archived"}
|
||||
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
|
||||
_VALID_IMPORTANCE = {"ALERT", "WARNING", "NORMAL"}
|
||||
|
||||
NOTIFICATION_ACTIONS = Literal[
|
||||
"overview",
|
||||
@@ -120,16 +122,13 @@ def register_notifications_tool(mcp: FastMCP) -> None:
|
||||
delete_archived - Delete all archived notifications (requires confirm=True)
|
||||
archive_all - Archive all notifications (optional importance filter)
|
||||
"""
|
||||
all_actions = {**QUERIES, **MUTATIONS}
|
||||
if action not in all_actions:
|
||||
raise ToolError(
|
||||
f"Invalid action '{action}'. Must be one of: {list(all_actions.keys())}"
|
||||
)
|
||||
if action not in ALL_ACTIONS:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
|
||||
|
||||
if action in DESTRUCTIVE_ACTIONS and not confirm:
|
||||
raise ToolError(f"Action '{action}' is destructive. Set confirm=True to proceed.")
|
||||
|
||||
try:
|
||||
with tool_error_handler("notifications", action, logger):
|
||||
logger.info(f"Executing unraid_notifications action={action}")
|
||||
|
||||
if action == "overview":
|
||||
@@ -147,18 +146,29 @@ def register_notifications_tool(mcp: FastMCP) -> None:
|
||||
filter_vars["importance"] = importance.upper()
|
||||
data = await make_graphql_request(QUERIES["list"], {"filter": filter_vars})
|
||||
notifications = data.get("notifications", {})
|
||||
result = notifications.get("list", [])
|
||||
return {"notifications": list(result) if isinstance(result, list) else []}
|
||||
return {"notifications": notifications.get("list", [])}
|
||||
|
||||
if action == "warnings":
|
||||
data = await make_graphql_request(QUERIES["warnings"])
|
||||
notifications = data.get("notifications", {})
|
||||
result = notifications.get("warningsAndAlerts", [])
|
||||
return {"warnings": list(result) if isinstance(result, list) else []}
|
||||
return {"warnings": notifications.get("warningsAndAlerts", [])}
|
||||
|
||||
if action == "create":
|
||||
if title is None or subject is None or description is None or importance is None:
|
||||
raise ToolError("create requires title, subject, description, and importance")
|
||||
if importance.upper() not in _VALID_IMPORTANCE:
|
||||
raise ToolError(
|
||||
f"importance must be one of: {', '.join(sorted(_VALID_IMPORTANCE))}. "
|
||||
f"Got: '{importance}'"
|
||||
)
|
||||
if len(title) > 200:
|
||||
raise ToolError(f"title must be at most 200 characters (got {len(title)})")
|
||||
if len(subject) > 500:
|
||||
raise ToolError(f"subject must be at most 500 characters (got {len(subject)})")
|
||||
if len(description) > 2000:
|
||||
raise ToolError(
|
||||
f"description must be at most 2000 characters (got {len(description)})"
|
||||
)
|
||||
input_data = {
|
||||
"title": title,
|
||||
"subject": subject,
|
||||
@@ -196,10 +206,4 @@ def register_notifications_tool(mcp: FastMCP) -> None:
|
||||
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_notifications action={action}: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute notifications/{action}: {e!s}") from e
|
||||
|
||||
logger.info("Notifications tool registered successfully")
|
||||
|
||||
@@ -4,13 +4,14 @@ Provides the `unraid_rclone` tool with 4 actions for managing
|
||||
cloud storage remotes (S3, Google Drive, Dropbox, FTP, etc.).
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import Any, Literal
|
||||
|
||||
from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
|
||||
|
||||
QUERIES: dict[str, str] = {
|
||||
@@ -49,6 +50,51 @@ RCLONE_ACTIONS = Literal[
|
||||
"delete_remote",
|
||||
]
|
||||
|
||||
# Max config entries to prevent abuse
|
||||
_MAX_CONFIG_KEYS = 50
|
||||
# Pattern for suspicious key names (path traversal, shell metacharacters)
|
||||
_DANGEROUS_KEY_PATTERN = re.compile(r"[.]{2}|[/\\;|`$(){}]")
|
||||
# Max length for individual config values
|
||||
_MAX_VALUE_LENGTH = 4096
|
||||
|
||||
|
||||
def _validate_config_data(config_data: dict[str, Any]) -> dict[str, str]:
|
||||
"""Validate and sanitize rclone config_data before passing to GraphQL.
|
||||
|
||||
Ensures all keys and values are safe strings with no injection vectors.
|
||||
|
||||
Raises:
|
||||
ToolError: If config_data contains invalid keys or values
|
||||
"""
|
||||
if len(config_data) > _MAX_CONFIG_KEYS:
|
||||
raise ToolError(f"config_data has {len(config_data)} keys (max {_MAX_CONFIG_KEYS})")
|
||||
|
||||
validated: dict[str, str] = {}
|
||||
for key, value in config_data.items():
|
||||
if not isinstance(key, str) or not key.strip():
|
||||
raise ToolError(
|
||||
f"config_data keys must be non-empty strings, got: {type(key).__name__}"
|
||||
)
|
||||
if _DANGEROUS_KEY_PATTERN.search(key):
|
||||
raise ToolError(
|
||||
f"config_data key '{key}' contains disallowed characters "
|
||||
f"(path traversal or shell metacharacters)"
|
||||
)
|
||||
if not isinstance(value, (str, int, float, bool)):
|
||||
raise ToolError(
|
||||
f"config_data['{key}'] must be a string, number, or boolean, "
|
||||
f"got: {type(value).__name__}"
|
||||
)
|
||||
str_value = str(value)
|
||||
if len(str_value) > _MAX_VALUE_LENGTH:
|
||||
raise ToolError(
|
||||
f"config_data['{key}'] value exceeds max length "
|
||||
f"({len(str_value)} > {_MAX_VALUE_LENGTH})"
|
||||
)
|
||||
validated[key] = str_value
|
||||
|
||||
return validated
|
||||
|
||||
|
||||
def register_rclone_tool(mcp: FastMCP) -> None:
|
||||
"""Register the unraid_rclone tool with the FastMCP instance."""
|
||||
@@ -75,7 +121,7 @@ def register_rclone_tool(mcp: FastMCP) -> None:
|
||||
if action in DESTRUCTIVE_ACTIONS and not confirm:
|
||||
raise ToolError(f"Action '{action}' is destructive. Set confirm=True to proceed.")
|
||||
|
||||
try:
|
||||
with tool_error_handler("rclone", action, logger):
|
||||
logger.info(f"Executing unraid_rclone action={action}")
|
||||
|
||||
if action == "list_remotes":
|
||||
@@ -96,9 +142,10 @@ def register_rclone_tool(mcp: FastMCP) -> None:
|
||||
if action == "create_remote":
|
||||
if name is None or provider_type is None or config_data is None:
|
||||
raise ToolError("create_remote requires name, provider_type, and config_data")
|
||||
validated_config = _validate_config_data(config_data)
|
||||
data = await make_graphql_request(
|
||||
MUTATIONS["create_remote"],
|
||||
{"input": {"name": name, "type": provider_type, "config": config_data}},
|
||||
{"input": {"name": name, "type": provider_type, "config": validated_config}},
|
||||
)
|
||||
remote = data.get("rclone", {}).get("createRCloneRemote")
|
||||
if not remote:
|
||||
@@ -127,10 +174,4 @@ def register_rclone_tool(mcp: FastMCP) -> None:
|
||||
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_rclone action={action}: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute rclone/{action}: {e!s}") from e
|
||||
|
||||
logger.info("RClone tool registered successfully")
|
||||
|
||||
@@ -4,17 +4,19 @@ Provides the `unraid_storage` tool with 6 actions for shares, physical disks,
|
||||
unassigned devices, log files, and log content retrieval.
|
||||
"""
|
||||
|
||||
import os
|
||||
from typing import Any, Literal
|
||||
|
||||
import anyio
|
||||
from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import DISK_TIMEOUT, make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
from ..core.utils import format_bytes
|
||||
|
||||
|
||||
_ALLOWED_LOG_PREFIXES = ("/var/log/", "/boot/logs/", "/mnt/")
|
||||
_MAX_TAIL_LINES = 10_000
|
||||
|
||||
QUERIES: dict[str, str] = {
|
||||
"shares": """
|
||||
@@ -56,6 +58,8 @@ QUERIES: dict[str, str] = {
|
||||
""",
|
||||
}
|
||||
|
||||
ALL_ACTIONS = set(QUERIES)
|
||||
|
||||
STORAGE_ACTIONS = Literal[
|
||||
"shares",
|
||||
"disks",
|
||||
@@ -66,21 +70,6 @@ STORAGE_ACTIONS = Literal[
|
||||
]
|
||||
|
||||
|
||||
def format_bytes(bytes_value: int | None) -> str:
|
||||
"""Format byte values into human-readable sizes."""
|
||||
if bytes_value is None:
|
||||
return "N/A"
|
||||
try:
|
||||
value = float(int(bytes_value))
|
||||
except (ValueError, TypeError):
|
||||
return "N/A"
|
||||
for unit in ["B", "KB", "MB", "GB", "TB", "PB"]:
|
||||
if value < 1024.0:
|
||||
return f"{value:.2f} {unit}"
|
||||
value /= 1024.0
|
||||
return f"{value:.2f} EB"
|
||||
|
||||
|
||||
def register_storage_tool(mcp: FastMCP) -> None:
|
||||
"""Register the unraid_storage tool with the FastMCP instance."""
|
||||
|
||||
@@ -101,17 +90,22 @@ def register_storage_tool(mcp: FastMCP) -> None:
|
||||
log_files - List available log files
|
||||
logs - Retrieve log content (requires log_path, optional tail_lines)
|
||||
"""
|
||||
if action not in QUERIES:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {list(QUERIES.keys())}")
|
||||
if action not in ALL_ACTIONS:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
|
||||
|
||||
if action == "disk_details" and not disk_id:
|
||||
raise ToolError("disk_id is required for 'disk_details' action")
|
||||
|
||||
if tail_lines < 1 or tail_lines > _MAX_TAIL_LINES:
|
||||
raise ToolError(f"tail_lines must be between 1 and {_MAX_TAIL_LINES}, got {tail_lines}")
|
||||
|
||||
if action == "logs":
|
||||
if not log_path:
|
||||
raise ToolError("log_path is required for 'logs' action")
|
||||
# Resolve path to prevent traversal attacks (e.g. /var/log/../../etc/shadow)
|
||||
normalized = str(await anyio.Path(log_path).resolve())
|
||||
# Resolve path synchronously to prevent traversal attacks.
|
||||
# Using os.path.realpath instead of anyio.Path.resolve() because the
|
||||
# async variant blocks on NFS-mounted paths under /mnt/ (Perf-AI-1).
|
||||
normalized = os.path.realpath(log_path) # noqa: ASYNC240
|
||||
if not any(normalized.startswith(p) for p in _ALLOWED_LOG_PREFIXES):
|
||||
raise ToolError(
|
||||
f"log_path must start with one of: {', '.join(_ALLOWED_LOG_PREFIXES)}. "
|
||||
@@ -128,17 +122,15 @@ def register_storage_tool(mcp: FastMCP) -> None:
|
||||
elif action == "logs":
|
||||
variables = {"path": log_path, "lines": tail_lines}
|
||||
|
||||
try:
|
||||
with tool_error_handler("storage", action, logger):
|
||||
logger.info(f"Executing unraid_storage action={action}")
|
||||
data = await make_graphql_request(query, variables, custom_timeout=custom_timeout)
|
||||
|
||||
if action == "shares":
|
||||
shares = data.get("shares", [])
|
||||
return {"shares": list(shares) if isinstance(shares, list) else []}
|
||||
return {"shares": data.get("shares", [])}
|
||||
|
||||
if action == "disks":
|
||||
disks = data.get("disks", [])
|
||||
return {"disks": list(disks) if isinstance(disks, list) else []}
|
||||
return {"disks": data.get("disks", [])}
|
||||
|
||||
if action == "disk_details":
|
||||
raw = data.get("disk", {})
|
||||
@@ -159,22 +151,14 @@ def register_storage_tool(mcp: FastMCP) -> None:
|
||||
return {"summary": summary, "details": raw}
|
||||
|
||||
if action == "unassigned":
|
||||
devices = data.get("unassignedDevices", [])
|
||||
return {"devices": list(devices) if isinstance(devices, list) else []}
|
||||
return {"devices": data.get("unassignedDevices", [])}
|
||||
|
||||
if action == "log_files":
|
||||
files = data.get("logFiles", [])
|
||||
return {"log_files": list(files) if isinstance(files, list) else []}
|
||||
return {"log_files": data.get("logFiles", [])}
|
||||
|
||||
if action == "logs":
|
||||
return dict(data.get("logFile") or {})
|
||||
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_storage action={action}: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute storage/{action}: {e!s}") from e
|
||||
|
||||
logger.info("Storage tool registered successfully")
|
||||
|
||||
@@ -10,7 +10,7 @@ from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
|
||||
|
||||
QUERIES: dict[str, str] = {
|
||||
@@ -39,17 +39,11 @@ def register_users_tool(mcp: FastMCP) -> None:
|
||||
Note: Unraid API does not support user management operations (list, add, delete).
|
||||
"""
|
||||
if action not in ALL_ACTIONS:
|
||||
raise ToolError(f"Invalid action '{action}'. Must be: me")
|
||||
raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
|
||||
|
||||
try:
|
||||
with tool_error_handler("users", action, logger):
|
||||
logger.info("Executing unraid_users action=me")
|
||||
data = await make_graphql_request(QUERIES["me"])
|
||||
return data.get("me") or {}
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_users action=me: {e}", exc_info=True)
|
||||
raise ToolError(f"Failed to execute users/me: {e!s}") from e
|
||||
|
||||
logger.info("Users tool registered successfully")
|
||||
|
||||
@@ -10,7 +10,7 @@ from fastmcp import FastMCP
|
||||
|
||||
from ..config.logging import logger
|
||||
from ..core.client import make_graphql_request
|
||||
from ..core.exceptions import ToolError
|
||||
from ..core.exceptions import ToolError, tool_error_handler
|
||||
|
||||
|
||||
QUERIES: dict[str, str] = {
|
||||
@@ -19,6 +19,13 @@ QUERIES: dict[str, str] = {
|
||||
vms { id domains { id name state uuid } }
|
||||
}
|
||||
""",
|
||||
# NOTE: The Unraid GraphQL API does not expose a single-VM query.
|
||||
# The details query is identical to list; client-side filtering is required.
|
||||
"details": """
|
||||
query ListVMs {
|
||||
vms { id domains { id name state uuid } }
|
||||
}
|
||||
""",
|
||||
}
|
||||
|
||||
MUTATIONS: dict[str, str] = {
|
||||
@@ -64,7 +71,7 @@ VM_ACTIONS = Literal[
|
||||
"reset",
|
||||
]
|
||||
|
||||
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS) | {"details"}
|
||||
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
|
||||
|
||||
|
||||
def register_vm_tool(mcp: FastMCP) -> None:
|
||||
@@ -98,20 +105,26 @@ def register_vm_tool(mcp: FastMCP) -> None:
|
||||
if action in DESTRUCTIVE_ACTIONS and not confirm:
|
||||
raise ToolError(f"Action '{action}' is destructive. Set confirm=True to proceed.")
|
||||
|
||||
try:
|
||||
logger.info(f"Executing unraid_vm action={action}")
|
||||
with tool_error_handler("vm", action, logger):
|
||||
try:
|
||||
logger.info(f"Executing unraid_vm action={action}")
|
||||
|
||||
if action in ("list", "details"):
|
||||
data = await make_graphql_request(QUERIES["list"])
|
||||
if data.get("vms"):
|
||||
if action == "list":
|
||||
data = await make_graphql_request(QUERIES["list"])
|
||||
if data.get("vms"):
|
||||
vms = data["vms"].get("domains") or data["vms"].get("domain") or []
|
||||
if isinstance(vms, dict):
|
||||
vms = [vms]
|
||||
return {"vms": vms}
|
||||
return {"vms": []}
|
||||
|
||||
if action == "details":
|
||||
data = await make_graphql_request(QUERIES["details"])
|
||||
if not data.get("vms"):
|
||||
raise ToolError("No VM data returned from server")
|
||||
vms = data["vms"].get("domains") or data["vms"].get("domain") or []
|
||||
if isinstance(vms, dict):
|
||||
vms = [vms]
|
||||
|
||||
if action == "list":
|
||||
return {"vms": vms}
|
||||
|
||||
# details: find specific VM
|
||||
for vm in vms:
|
||||
if (
|
||||
vm.get("uuid") == vm_id
|
||||
@@ -121,33 +134,28 @@ def register_vm_tool(mcp: FastMCP) -> None:
|
||||
return dict(vm)
|
||||
available = [f"{v.get('name')} (UUID: {v.get('uuid')})" for v in vms]
|
||||
raise ToolError(f"VM '{vm_id}' not found. Available: {', '.join(available)}")
|
||||
if action == "details":
|
||||
raise ToolError("No VM data returned from server")
|
||||
return {"vms": []}
|
||||
|
||||
# Mutations
|
||||
if action in MUTATIONS:
|
||||
data = await make_graphql_request(MUTATIONS[action], {"id": vm_id})
|
||||
field = _MUTATION_FIELDS.get(action, action)
|
||||
if data.get("vm") and field in data["vm"]:
|
||||
return {
|
||||
"success": data["vm"][field],
|
||||
"action": action,
|
||||
"vm_id": vm_id,
|
||||
}
|
||||
raise ToolError(f"Failed to {action} VM or unexpected response")
|
||||
# Mutations
|
||||
if action in MUTATIONS:
|
||||
data = await make_graphql_request(MUTATIONS[action], {"id": vm_id})
|
||||
field = _MUTATION_FIELDS.get(action, action)
|
||||
if data.get("vm") and field in data["vm"]:
|
||||
return {
|
||||
"success": data["vm"][field],
|
||||
"action": action,
|
||||
"vm_id": vm_id,
|
||||
}
|
||||
raise ToolError(f"Failed to {action} VM or unexpected response")
|
||||
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
raise ToolError(f"Unhandled action '{action}' — this is a bug")
|
||||
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in unraid_vm action={action}: {e}", exc_info=True)
|
||||
msg = str(e)
|
||||
if "VMs are not available" in msg:
|
||||
raise ToolError(
|
||||
"VMs not available on this server. Check VM support is enabled."
|
||||
) from e
|
||||
raise ToolError(f"Failed to execute vm/{action}: {msg}") from e
|
||||
except ToolError:
|
||||
raise
|
||||
except Exception as e:
|
||||
if "VMs are not available" in str(e):
|
||||
raise ToolError(
|
||||
"VMs not available on this server. Check VM support is enabled."
|
||||
) from e
|
||||
raise
|
||||
|
||||
logger.info("VM tool registered successfully")
|
||||
|
||||
Reference in New Issue
Block a user