mirror of
https://github.com/jmagar/unraid-mcp.git
synced 2026-03-02 00:04:45 -08:00
Addresses issues found by 4 parallel review agents (code-reviewer,
silent-failure-hunter, type-design-analyzer, pr-test-analyzer).
Source fixes:
- core/utils.py: add public safe_display_url() (moved from tools/health.py)
- core/client.py: rename _redact_sensitive → redact_sensitive (public API)
- core/types.py: add SubscriptionData.__post_init__ for tz-aware datetime
enforcement; remove 6 unused type aliases (SystemHealth, APIResponse, etc.)
- subscriptions/manager.py: add exc_info=True to both except-Exception blocks;
add except ValueError break-on-config-error before retry loop; import
redact_sensitive by new public name
- subscriptions/resources.py: re-raise in autostart_subscriptions() so
ensure_subscriptions_started() doesn't permanently set _subscriptions_started
- subscriptions/diagnostics.py: except ToolError: raise before broad except;
use safe_display_url() instead of raw URL slice
- tools/health.py: move _safe_display_url to core/utils; add exc_info=True;
raise ToolError (not return dict) on ImportError
- tools/info.py: use get_args(INFO_ACTIONS) instead of INFO_ACTIONS.__args__
- tools/{array,docker,keys,notifications,rclone,storage,virtualization}.py:
add Literal-vs-ALL_ACTIONS sync check at import time
Test fixes:
- test_health.py: import safe_display_url from core.utils; update
test_diagnose_import_error_internal to expect ToolError (not error dict)
- test_storage.py: add 3 safe_get tests for zero/False/empty-string values
- test_subscription_manager.py: add TestCapLogContentSingleMassiveLine (2 tests)
- test_client.py: rename _redact_sensitive → redact_sensitive; add tests for
new sensitive keys and is_cacheable explicit-keyword form
173 lines
5.8 KiB
Python
"""Storage and disk management.
|
|
|
|
Provides the `unraid_storage` tool with 6 actions for shares, physical disks,
|
|
unassigned devices, log files, and log content retrieval.
|
|
"""
|
|
|
|
import os
|
|
from typing import Any, Literal, get_args
|
|
|
|
from fastmcp import FastMCP
|
|
|
|
from ..config.logging import logger
|
|
from ..core.client import DISK_TIMEOUT, make_graphql_request
|
|
from ..core.exceptions import ToolError, tool_error_handler
|
|
from ..core.utils import format_bytes
|
|
|
|
|
|
# Only log paths under these roots may be read via the "logs" action.
_ALLOWED_LOG_PREFIXES = ("/var/log/", "/boot/logs/", "/mnt/")
# Upper bound on tail_lines to keep responses from ballooning.
_MAX_TAIL_LINES = 10_000

# GraphQL query text for each supported action, keyed by action name.
QUERIES: dict[str, str] = {
    "shares": """
        query GetSharesInfo {
          shares {
            id name free used size include exclude cache nameOrig
            comment allocator splitLevel floor cow color luksStatus
          }
        }
    """,
    "disks": """
        query ListPhysicalDisks {
          disks { id device name }
        }
    """,
    "disk_details": """
        query GetDiskDetails($id: PrefixedID!) {
          disk(id: $id) {
            id device name serialNum size temperature
          }
        }
    """,
    "unassigned": """
        query GetUnassignedDevices {
          unassignedDevices { id device name size type }
        }
    """,
    "log_files": """
        query ListLogFiles {
          logFiles { name path size modifiedAt }
        }
    """,
    "logs": """
        query GetLogContent($path: String!, $lines: Int) {
          logFile(path: $path, lines: $lines) {
            path content totalLines startLine
          }
        }
    """,
}

# The runtime source of truth for valid actions is the QUERIES mapping.
ALL_ACTIONS = set(QUERIES)

# Static (type-checker-visible) mirror of ALL_ACTIONS.
STORAGE_ACTIONS = Literal[
    "shares",
    "disks",
    "disk_details",
    "unassigned",
    "log_files",
    "logs",
]

# Import-time guard: fail fast if the Literal drifts out of sync with QUERIES.
_declared = set(get_args(STORAGE_ACTIONS))
if _declared != ALL_ACTIONS:
    _missing = ALL_ACTIONS - _declared
    _extra = _declared - ALL_ACTIONS
    raise RuntimeError(
        f"STORAGE_ACTIONS and ALL_ACTIONS are out of sync. "
        f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
    )
|
|
|
|
|
|
def _disk_summary(raw: dict[str, Any]) -> dict[str, Any]:
    """Shape a raw GraphQL disk payload into a human-friendly summary dict."""
    temperature = raw.get("temperature")
    return {
        "disk_id": raw.get("id"),
        "device": raw.get("device"),
        "name": raw.get("name"),
        "serial_number": raw.get("serialNum"),
        "size_formatted": format_bytes(raw.get("size")),
        # Compare against None, not truthiness: a temperature of 0 is valid.
        "temperature": (
            f"{temperature}\u00b0C" if temperature is not None else "N/A"
        ),
    }


def register_storage_tool(mcp: FastMCP) -> None:
    """Register the unraid_storage tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_storage(
        action: STORAGE_ACTIONS,
        disk_id: str | None = None,
        log_path: str | None = None,
        tail_lines: int = 100,
    ) -> dict[str, Any]:
        """Manage Unraid storage, disks, and logs.

        Actions:
            shares - List all user shares with capacity info
            disks - List all physical disks
            disk_details - Detailed SMART info for a disk (requires disk_id)
            unassigned - List unassigned devices
            log_files - List available log files
            logs - Retrieve log content (requires log_path, optional tail_lines)
        """
        # Defensive re-check: clients that ignore the Literal annotation can
        # still send arbitrary action strings at runtime.
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")

        if action == "disk_details" and not disk_id:
            raise ToolError("disk_id is required for 'disk_details' action")

        if action == "logs" and (tail_lines < 1 or tail_lines > _MAX_TAIL_LINES):
            raise ToolError(f"tail_lines must be between 1 and {_MAX_TAIL_LINES}, got {tail_lines}")

        if action == "logs":
            if not log_path:
                raise ToolError("log_path is required for 'logs' action")
            # Resolve path synchronously to prevent traversal attacks.
            # Using os.path.realpath instead of anyio.Path.resolve() because the
            # async variant blocks on NFS-mounted paths under /mnt/ (Perf-AI-1).
            normalized = os.path.realpath(log_path)  # noqa: ASYNC240
            # str.startswith accepts a tuple of prefixes — one C-level call
            # instead of a Python-level any() generator.
            if not normalized.startswith(_ALLOWED_LOG_PREFIXES):
                raise ToolError(
                    f"log_path must start with one of: {', '.join(_ALLOWED_LOG_PREFIXES)}. "
                    f"Use log_files action to discover valid paths."
                )
            log_path = normalized

        query = QUERIES[action]
        variables: dict[str, Any] | None = None
        # Disk enumeration can spin up sleeping drives; allow a longer timeout.
        custom_timeout = DISK_TIMEOUT if action in ("disks", "disk_details") else None

        if action == "disk_details":
            variables = {"id": disk_id}
        elif action == "logs":
            variables = {"path": log_path, "lines": tail_lines}

        with tool_error_handler("storage", action, logger):
            # Lazy %-style args: no string formatting when INFO is disabled.
            logger.info("Executing unraid_storage action=%s", action)
            data = await make_graphql_request(query, variables, custom_timeout=custom_timeout)

            if action == "shares":
                return {"shares": data.get("shares", [])}

            if action == "disks":
                return {"disks": data.get("disks", [])}

            if action == "disk_details":
                raw = data.get("disk", {})
                if not raw:
                    raise ToolError(f"Disk '{disk_id}' not found")
                return {"summary": _disk_summary(raw), "details": raw}

            if action == "unassigned":
                return {"devices": data.get("unassignedDevices", [])}

            if action == "log_files":
                return {"log_files": data.get("logFiles", [])}

            if action == "logs":
                # logFile may be null if the server dropped the file mid-read.
                return dict(data.get("logFile") or {})

            # Unreachable while the branches above cover ALL_ACTIONS; kept so a
            # future action added to QUERIES without a branch fails loudly.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Storage tool registered successfully")