mirror of
https://github.com/jmagar/unraid-mcp.git
synced 2026-03-01 16:04:24 -08:00
Addresses all critical, high, medium, and low issues from full codebase review. 494 tests pass, ruff clean, ty type-check clean. Security: - Add tool_error_handler context manager (exceptions.py) — standardised error handling, eliminates 11 bare except-reraise patterns - Remove unused exception subclasses (ConfigurationError, UnraidAPIError, SubscriptionError, ValidationError, IdempotentOperationError) - Harden GraphQL subscription query validator with allow-list and forbidden-keyword regex (diagnostics.py) - Add input validation for rclone create_remote config_data: injection, path-traversal, and key-count limits (rclone.py) - Validate notifications importance enum before GraphQL request (notifications.py) - Sanitise HTTP/network/JSON error messages — no raw exception strings leaked to clients (client.py) - Strip path/creds from displayed API URL via _safe_display_url (health.py) - Enable Ruff S (bandit) rule category in pyproject.toml - Harden container mutations to strict-only matching — no fuzzy/substring for destructive operations (docker.py) Performance: - Token-bucket rate limiter (90 tokens, 9 req/s) with 429 retry backoff (client.py) - Lazy asyncio.Lock init via _get_client_lock() — fixes event-loop module-load crash (client.py) - Double-checked locking in get_http_client() for fast-path (client.py) - Short hex container ID fast-path skips list fetch (docker.py) - Cap resource_data log content to 1 MB / 5,000 lines (manager.py) - Reset reconnect counter after 30 s stable connection (manager.py) - Move tail_lines validation to module level; enforce 10,000 line cap (storage.py, docker.py) - force_terminal=True removed from logging RichHandler (logging.py) Architecture: - Register diagnostic tools in server startup (server.py) - Move ALL_ACTIONS computation to module level in all tools - Consolidate format_kb / format_bytes into shared core/utils.py - Add _safe_get() helper in core/utils.py for nested dict traversal - Extract _analyze_subscription_status() 
from health.py diagnose handler - Validate required config at startup — fail fast with CRITICAL log (server.py) Code quality: - Remove ~90 lines of dead Rich formatting helpers from logging.py - Remove dead self.websocket attribute from SubscriptionManager - Remove dead setup_uvicorn_logging() wrapper - Move _VALID_IMPORTANCE to module level (N806 fix) - Add slots=True to all three dataclasses (SubscriptionData, SystemHealth, APIResponse) - Fix None rendering as literal "None" string in info.py summaries - Change fuzzy-match log messages from INFO to DEBUG (docker.py) - UTC-aware datetimes throughout (manager.py, diagnostics.py) Infrastructure: - Upgrade base image python:3.11-slim → python:3.12-slim (Dockerfile) - Add non-root appuser (UID/GID 1000) with HEALTHCHECK (Dockerfile) - Add read_only, cap_drop: ALL, tmpfs /tmp to docker-compose.yml - Single-source version via importlib.metadata (pyproject.toml → __init__.py) - Add open_timeout to all websockets.connect() calls Tests: - Update error message matchers to match sanitised messages (test_client.py) - Fix patch targets for UNRAID_API_URL → utils module (test_subscriptions.py) - Fix importance="info" → importance="normal" (test_notifications.py, http_layer) - Fix naive datetime fixtures → UTC-aware (test_subscriptions.py) Co-authored-by: Claude <claude@anthropic.com>
165 lines
5.5 KiB
Python
165 lines
5.5 KiB
Python
"""Storage and disk management.
|
|
|
|
Provides the `unraid_storage` tool with 6 actions for shares, physical disks,
|
|
unassigned devices, log files, and log content retrieval.
|
|
"""
|
|
|
|
import os
|
|
from typing import Any, Literal
|
|
|
|
from fastmcp import FastMCP
|
|
|
|
from ..config.logging import logger
|
|
from ..core.client import DISK_TIMEOUT, make_graphql_request
|
|
from ..core.exceptions import ToolError, tool_error_handler
|
|
from ..core.utils import format_bytes
|
|
|
|
|
|
# Log reads are restricted to paths under these directory prefixes; the
# request path is realpath-resolved before checking (path-traversal guard).
_ALLOWED_LOG_PREFIXES = ("/var/log/", "/boot/logs/", "/mnt/")

# Upper bound on tail_lines so a single log request stays bounded in size.
_MAX_TAIL_LINES = 10_000
|
|
|
|
# GraphQL query text for each supported action.  The keys double as the set
# of valid action names (ALL_ACTIONS below is derived from them).
QUERIES: dict[str, str] = {
    "shares": """
        query GetSharesInfo {
            shares {
                id name free used size include exclude cache nameOrig
                comment allocator splitLevel floor cow color luksStatus
            }
        }
    """,
    "disks": """
        query ListPhysicalDisks {
            disks { id device name }
        }
    """,
    "disk_details": """
        query GetDiskDetails($id: PrefixedID!) {
            disk(id: $id) {
                id device name serialNum size temperature
            }
        }
    """,
    "unassigned": """
        query GetUnassignedDevices {
            unassignedDevices { id device name size type }
        }
    """,
    "log_files": """
        query ListLogFiles {
            logFiles { name path size modifiedAt }
        }
    """,
    "logs": """
        query GetLogContent($path: String!, $lines: Int) {
            logFile(path: $path, lines: $lines) {
                path content totalLines startLine
            }
        }
    """,
}

# Valid action names, computed once at module load.
ALL_ACTIONS = set(QUERIES)
|
|
|
|
# Action names accepted by the tool, mirrored as a Literal so FastMCP can
# expose them in the generated tool schema.  Keep in sync with QUERIES.
STORAGE_ACTIONS = Literal["shares", "disks", "disk_details", "unassigned", "log_files", "logs"]
|
|
|
|
|
|
def register_storage_tool(mcp: FastMCP) -> None:
    """Register the unraid_storage tool with the FastMCP instance."""

    # Actions whose response is a single renamed top-level GraphQL field:
    # action -> (response_key, graphql_field).  Collapses four duplicated
    # return branches into one lookup.
    simple_fields = {
        "shares": ("shares", "shares"),
        "disks": ("disks", "disks"),
        "unassigned": ("devices", "unassignedDevices"),
        "log_files": ("log_files", "logFiles"),
    }

    @mcp.tool()
    async def unraid_storage(
        action: STORAGE_ACTIONS,
        disk_id: str | None = None,
        log_path: str | None = None,
        tail_lines: int = 100,
    ) -> dict[str, Any]:
        """Manage Unraid storage, disks, and logs.

        Actions:
            shares       - List all user shares with capacity info
            disks        - List all physical disks
            disk_details - Detailed SMART info for a disk (requires disk_id)
            unassigned   - List unassigned devices
            log_files    - List available log files
            logs         - Retrieve log content (requires log_path, optional tail_lines)

        Raises:
            ToolError: for an invalid action, a missing/invalid argument, an
                unknown disk_id, or a log_path outside the allowed prefixes.
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")

        if action == "disk_details" and not disk_id:
            raise ToolError("disk_id is required for 'disk_details' action")

        # Validated for every action (even ones that ignore tail_lines) so a
        # bad value is always reported to the caller.
        if tail_lines < 1 or tail_lines > _MAX_TAIL_LINES:
            raise ToolError(f"tail_lines must be between 1 and {_MAX_TAIL_LINES}, got {tail_lines}")

        if action == "logs":
            if not log_path:
                raise ToolError("log_path is required for 'logs' action")
            # Resolve path synchronously to prevent traversal attacks.
            # Using os.path.realpath instead of anyio.Path.resolve() because the
            # async variant blocks on NFS-mounted paths under /mnt/ (Perf-AI-1).
            normalized = os.path.realpath(log_path)  # noqa: ASYNC240
            # str.startswith accepts a tuple: one call checks every prefix.
            if not normalized.startswith(_ALLOWED_LOG_PREFIXES):
                raise ToolError(
                    f"log_path must start with one of: {', '.join(_ALLOWED_LOG_PREFIXES)}. "
                    f"Use log_files action to discover valid paths."
                )
            log_path = normalized

        query = QUERIES[action]
        variables: dict[str, Any] | None = None
        # Disk enumeration may need to wake sleeping drives; use longer timeout.
        custom_timeout = DISK_TIMEOUT if action in ("disks", "disk_details") else None

        if action == "disk_details":
            variables = {"id": disk_id}
        elif action == "logs":
            variables = {"path": log_path, "lines": tail_lines}

        with tool_error_handler("storage", action, logger):
            # Lazy %-style args: formatting is skipped if INFO is disabled.
            logger.info("Executing unraid_storage action=%s", action)
            data = await make_graphql_request(query, variables, custom_timeout=custom_timeout)

            if action in simple_fields:
                key, field = simple_fields[action]
                return {key: data.get(field, [])}

            if action == "disk_details":
                raw = data.get("disk", {})
                if not raw:
                    raise ToolError(f"Disk '{disk_id}' not found")
                summary = {
                    "disk_id": raw.get("id"),
                    "device": raw.get("device"),
                    "name": raw.get("name"),
                    "serial_number": raw.get("serialNum"),
                    "size_formatted": format_bytes(raw.get("size")),
                    "temperature": (
                        f"{raw['temperature']}\u00b0C"
                        if raw.get("temperature") is not None
                        else "N/A"
                    ),
                }
                return {"summary": summary, "details": raw}

            if action == "logs":
                return dict(data.get("logFile") or {})

            # Defensive: ALL_ACTIONS was checked above, so reaching here means
            # the action/response mapping above is out of sync.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Storage tool registered successfully")
|