refactor(tools)!: consolidate 15 individual tools into single unified unraid tool

BREAKING CHANGE: Replaces 15 separate MCP tools (unraid_info, unraid_array,
unraid_storage, unraid_docker, unraid_vm, unraid_notifications, unraid_rclone,
unraid_users, unraid_keys, unraid_health, unraid_settings, unraid_customization,
unraid_plugins, unraid_oidc, unraid_live) with a single `unraid` tool using
action (domain) + subaction (operation) routing.

New interface: unraid(action="system", subaction="overview") replaces
unraid_info(action="overview"). All 15 domains and ~108 subactions preserved.

- Add unraid_mcp/tools/unraid.py (1891 lines, all domains consolidated)
- Remove 15 individual tool files
- Update tools/__init__.py to register single unified tool
- Update server.py for new tool registration pattern
- Update subscriptions/manager.py and resources.py for new tool names
- Update all 25 test files + integration/contract/safety/schema/property tests
- Update mcporter smoke-test script for new tool interface
- Bump version 0.6.0 → 1.0.0

Co-authored-by: Claude <noreply@anthropic.com>
This commit is contained in:
Jacob Magar
2026-03-16 02:29:57 -04:00
parent faf9fb9ad7
commit dab1cd6995
48 changed files with 3591 additions and 4903 deletions

View File

@@ -1,14 +1,19 @@
"""MCP tools organized by functional domain.
"""MCP tools — single consolidated unraid tool with action + subaction routing.
10 consolidated tools with 76 actions total:
unraid_info - System information queries (19 actions)
unraid_array - Array operations and parity management (5 actions)
unraid_storage - Storage, disks, and logs (6 actions)
unraid_docker - Docker container management (15 actions)
unraid_vm - Virtual machine management (9 actions)
unraid_notifications - Notification management (9 actions)
unraid_rclone - Cloud storage remotes (4 actions)
unraid_users - User management (1 action)
unraid_keys - API key management (5 actions)
unraid_health - Health monitoring and diagnostics (3 actions)
unraid - All Unraid operations (15 actions, ~88 subactions)
system - System info, metrics, UPS, network, registration
health - Health checks, connection test, diagnostics, setup
array - Parity, array state, disk add/remove/mount
disk - Shares, physical disks, logs, flash backup
docker - Container list/details/start/stop/restart, networks
vm - VM list/details and lifecycle (start/stop/pause/resume/etc)
notification - Notification CRUD and bulk operations
key - API key management
plugin - Plugin list/add/remove
rclone - Cloud remote management
setting - System settings and UPS config
customization - Theme and UI customization
oidc - OIDC/SSO provider management
user - Current user info
live - Real-time subscription snapshots
"""

View File

@@ -1,214 +0,0 @@
"""Array management: parity checks, array state, and disk operations.
Provides the `unraid_array` tool with 13 actions covering parity check
management, array start/stop, and disk add/remove/mount operations.
"""
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.guards import gate_destructive_action
# Read-only GraphQL documents, keyed by action name.
QUERIES: dict[str, str] = {
    "parity_status": """
    query GetParityStatus {
      array { parityCheckStatus { progress speed errors status paused running correcting } }
    }
    """,
    "parity_history": """
    query GetParityHistory {
      parityHistory {
        date duration speed status errors progress correcting paused running
      }
    }
    """,
}
# Mutating GraphQL documents, keyed by action name.
MUTATIONS: dict[str, str] = {
    "parity_start": """
    mutation StartParityCheck($correct: Boolean!) {
      parityCheck { start(correct: $correct) }
    }
    """,
    "parity_pause": """
    mutation PauseParityCheck {
      parityCheck { pause }
    }
    """,
    "parity_resume": """
    mutation ResumeParityCheck {
      parityCheck { resume }
    }
    """,
    "parity_cancel": """
    mutation CancelParityCheck {
      parityCheck { cancel }
    }
    """,
    "start_array": """
    mutation StartArray {
      array { setState(input: { desiredState: START }) {
        state capacity { kilobytes { free used total } }
      }}
    }
    """,
    "stop_array": """
    mutation StopArray {
      array { setState(input: { desiredState: STOP }) {
        state
      }}
    }
    """,
    "add_disk": """
    mutation AddDisk($id: PrefixedID!, $slot: Int) {
      array { addDiskToArray(input: { id: $id, slot: $slot }) {
        state disks { id name device type status }
      }}
    }
    """,
    "remove_disk": """
    mutation RemoveDisk($id: PrefixedID!) {
      array { removeDiskFromArray(input: { id: $id }) {
        state disks { id name device type }
      }}
    }
    """,
    "mount_disk": """
    mutation MountDisk($id: PrefixedID!) {
      array { mountArrayDisk(id: $id) { id name device status } }
    }
    """,
    "unmount_disk": """
    mutation UnmountDisk($id: PrefixedID!) {
      array { unmountArrayDisk(id: $id) { id name device status } }
    }
    """,
    "clear_disk_stats": """
    mutation ClearDiskStats($id: PrefixedID!) {
      array { clearArrayDiskStatistics(id: $id) }
    }
    """,
}
# Actions that are gated behind an explicit confirmation (see gate_destructive_action).
DESTRUCTIVE_ACTIONS = {"remove_disk", "clear_disk_stats", "stop_array"}
# Canonical action set, derived from the query/mutation tables above.
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
# Literal mirror of ALL_ACTIONS so MCP clients get schema-level validation
# of the `action` parameter.
ARRAY_ACTIONS = Literal[
    "add_disk",
    "clear_disk_stats",
    "mount_disk",
    "parity_cancel",
    "parity_history",
    "parity_pause",
    "parity_resume",
    "parity_start",
    "parity_status",
    "remove_disk",
    "start_array",
    "stop_array",
    "unmount_disk",
]
# Import-time guard: fail fast if the Literal drifts out of sync with the tables.
if set(get_args(ARRAY_ACTIONS)) != ALL_ACTIONS:
    _missing = ALL_ACTIONS - set(get_args(ARRAY_ACTIONS))
    _extra = set(get_args(ARRAY_ACTIONS)) - ALL_ACTIONS
    raise RuntimeError(
        f"ARRAY_ACTIONS and ALL_ACTIONS are out of sync. "
        f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
    )
def register_array_tool(mcp: FastMCP) -> None:
    """Register the unraid_array tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_array(
        action: ARRAY_ACTIONS,
        ctx: Context | None = None,  # MCP context, forwarded to the destructive-action gate
        confirm: bool = False,  # must be True for DESTRUCTIVE_ACTIONS
        correct: bool | None = None,  # parity_start only: True writes corrections
        disk_id: str | None = None,  # target disk for disk-level actions
        slot: int | None = None,  # add_disk only; omitted from the mutation when None
    ) -> dict[str, Any]:
        """Manage Unraid array: parity checks, array state, and disk operations.
        Parity check actions:
        parity_start - Start parity check (correct=True to write fixes; required)
        parity_pause - Pause running parity check
        parity_resume - Resume paused parity check
        parity_cancel - Cancel running parity check
        parity_status - Get current parity check status and progress
        parity_history - Get parity check history log
        Array state actions:
        start_array - Start the array (desiredState=START)
        stop_array - Stop the array (desiredState=STOP)
        Disk operations (requires disk_id):
        add_disk - Add a disk to the array (requires disk_id; optional slot)
        remove_disk - Remove a disk from the array (requires disk_id, confirm=True; array must be stopped)
        mount_disk - Mount a disk (requires disk_id)
        unmount_disk - Unmount a disk (requires disk_id)
        clear_disk_stats - Clear I/O statistics for a disk (requires disk_id, confirm=True)
        """
        # Reject unknown actions up front with the full list of valid ones.
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        # Gate destructive actions; the messages below are shown to the caller
        # when confirmation is required (see core.guards.gate_destructive_action).
        await gate_destructive_action(
            ctx,
            action,
            DESTRUCTIVE_ACTIONS,
            confirm,
            {
                "remove_disk": f"Remove disk **{disk_id}** from the array. The array must be stopped first.",
                "clear_disk_stats": f"Clear all I/O statistics for disk **{disk_id}**. This cannot be undone.",
                "stop_array": "Stop the Unraid array. Running containers and VMs may lose access to array shares.",
            },
        )
        with tool_error_handler("array", action, logger):
            logger.info(f"Executing unraid_array action={action}")
            # --- Queries (no variables needed) ---
            if action in QUERIES:
                data = await make_graphql_request(QUERIES[action])
                return {"success": True, "action": action, "data": data}
            # --- Mutations ---
            if action == "parity_start":
                # correct is deliberately required (no default): writing parity
                # corrections vs. check-only is an explicit caller decision.
                if correct is None:
                    raise ToolError("correct is required for 'parity_start' action")
                data = await make_graphql_request(MUTATIONS[action], {"correct": correct})
                return {"success": True, "action": action, "data": data}
            if action in ("parity_pause", "parity_resume", "parity_cancel"):
                data = await make_graphql_request(MUTATIONS[action])
                return {"success": True, "action": action, "data": data}
            if action in ("start_array", "stop_array"):
                data = await make_graphql_request(MUTATIONS[action])
                return {"success": True, "action": action, "data": data}
            if action == "add_disk":
                if not disk_id:
                    raise ToolError("disk_id is required for 'add_disk' action")
                variables: dict[str, Any] = {"id": disk_id}
                # slot is optional: only include it when the caller provided one.
                if slot is not None:
                    variables["slot"] = slot
                data = await make_graphql_request(MUTATIONS[action], variables)
                return {"success": True, "action": action, "data": data}
            if action in ("remove_disk", "mount_disk", "unmount_disk", "clear_disk_stats"):
                if not disk_id:
                    raise ToolError(f"disk_id is required for '{action}' action")
                data = await make_graphql_request(MUTATIONS[action], {"id": disk_id})
                return {"success": True, "action": action, "data": data}
            # Defensive: the branches above should cover every member of ALL_ACTIONS.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Array tool registered successfully")

View File

@@ -1,119 +0,0 @@
"""UI customization and system state queries.
Provides the `unraid_customization` tool with 5 actions covering
theme/customization data, public UI config, initial setup state, and
theme mutation.
"""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Literal, get_args
if TYPE_CHECKING:
from fastmcp import FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
# Read-only GraphQL documents, keyed by action name.
QUERIES: dict[str, str] = {
    "theme": """
    query GetCustomization {
      customization {
        theme { name showBannerImage showBannerGradient showHeaderDescription
                headerBackgroundColor headerPrimaryTextColor headerSecondaryTextColor }
        partnerInfo { partnerName hasPartnerLogo partnerUrl partnerLogoUrl }
        activationCode { code partnerName serverName sysModel comment header theme }
      }
    }
    """,
    "public_theme": """
    query GetPublicTheme {
      publicTheme { name showBannerImage showBannerGradient showHeaderDescription
                    headerBackgroundColor headerPrimaryTextColor headerSecondaryTextColor }
      publicPartnerInfo { partnerName hasPartnerLogo partnerUrl partnerLogoUrl }
    }
    """,
    "is_initial_setup": """
    query IsInitialSetup {
      isInitialSetup
    }
    """,
    "sso_enabled": """
    query IsSSOEnabled {
      isSSOEnabled
    }
    """,
}
# Mutating GraphQL documents, keyed by action name.
MUTATIONS: dict[str, str] = {
    "set_theme": """
    mutation SetTheme($theme: ThemeName!) {
      customization { setTheme(theme: $theme) {
        name showBannerImage showBannerGradient showHeaderDescription
      }}
    }
    """,
}
# Canonical action set, derived from the query/mutation tables above.
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
# Literal mirror of ALL_ACTIONS so MCP clients get schema-level validation.
CUSTOMIZATION_ACTIONS = Literal[
    "is_initial_setup",
    "public_theme",
    "set_theme",
    "sso_enabled",
    "theme",
]
# Import-time guard: fail fast if the Literal drifts out of sync with the tables.
if set(get_args(CUSTOMIZATION_ACTIONS)) != ALL_ACTIONS:
    _missing = ALL_ACTIONS - set(get_args(CUSTOMIZATION_ACTIONS))
    _extra = set(get_args(CUSTOMIZATION_ACTIONS)) - ALL_ACTIONS
    raise RuntimeError(
        f"CUSTOMIZATION_ACTIONS and ALL_ACTIONS are out of sync. "
        f"Missing: {_missing or 'none'}. Extra: {_extra or 'none'}"
    )
def register_customization_tool(mcp: FastMCP) -> None:
    """Register the unraid_customization tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_customization(
        action: CUSTOMIZATION_ACTIONS,
        theme_name: str | None = None,
    ) -> dict[str, Any]:
        """Manage Unraid UI customization and system state.
        Actions:
        theme - Get full customization (theme, partner info, activation code)
        public_theme - Get public theme and partner info (no auth required)
        is_initial_setup - Check if server is in initial setup mode
        sso_enabled - Check if SSO is enabled
        set_theme - Change the UI theme (requires theme_name: azure/black/gray/white)
        """
        # Validate the action, then the action-specific argument, before any I/O.
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        if action == "set_theme" and not theme_name:
            raise ToolError(
                "theme_name is required for 'set_theme' action "
                "(valid values: azure, black, gray, white)"
            )
        with tool_error_handler("customization", action, logger):
            logger.info(f"Executing unraid_customization action={action}")
            # Dispatch: every action is either a plain query or the single
            # set_theme mutation; both share the same response envelope.
            if action in QUERIES:
                payload = await make_graphql_request(QUERIES[action])
            elif action == "set_theme":
                payload = await make_graphql_request(MUTATIONS[action], {"theme": theme_name})
            else:
                raise ToolError(f"Unhandled action '{action}' — this is a bug")
            return {"success": True, "action": action, "data": payload}

    logger.info("Customization tool registered successfully")

View File

@@ -1,342 +0,0 @@
"""Docker container management.
Provides the `unraid_docker` tool with 7 actions for container lifecycle
and network inspection.
"""
import re
from typing import Any, Literal, get_args
from fastmcp import FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.utils import safe_get
QUERIES: dict[str, str] = {
"list": """
query ListDockerContainers {
docker { containers(skipCache: false) {
id names image state status autoStart
} }
}
""",
"details": """
query GetContainerDetails {
docker { containers(skipCache: false) {
id names image imageId command created
ports { ip privatePort publicPort type }
sizeRootFs labels state status
hostConfig { networkMode }
networkSettings mounts autoStart
} }
}
""",
"networks": """
query GetDockerNetworks {
docker { networks { id name driver scope } }
}
""",
"network_details": """
query GetDockerNetwork {
docker { networks { id name driver scope enableIPv6 internal attachable containers options labels } }
}
""",
}
MUTATIONS: dict[str, str] = {
"start": """
mutation StartContainer($id: PrefixedID!) {
docker { start(id: $id) { id names state status } }
}
""",
"stop": """
mutation StopContainer($id: PrefixedID!) {
docker { stop(id: $id) { id names state status } }
}
""",
}
DESTRUCTIVE_ACTIONS: set[str] = set()
# NOTE (Code-M-07): "details" is listed here because it requires a container_id
# parameter, but unlike mutations it uses fuzzy name matching (not strict).
# This is intentional: read-only queries are safe with fuzzy matching.
_ACTIONS_REQUIRING_CONTAINER_ID = {"start", "stop", "details"}
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS) | {"restart"}
DOCKER_ACTIONS = Literal[
"list",
"details",
"start",
"stop",
"restart",
"networks",
"network_details",
]
if set(get_args(DOCKER_ACTIONS)) != ALL_ACTIONS:
_missing = ALL_ACTIONS - set(get_args(DOCKER_ACTIONS))
_extra = set(get_args(DOCKER_ACTIONS)) - ALL_ACTIONS
raise RuntimeError(
f"DOCKER_ACTIONS and ALL_ACTIONS are out of sync. "
f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
)
# Full PrefixedID: 64 hex chars + optional suffix (e.g., ":local")
_DOCKER_ID_PATTERN = re.compile(r"^[a-f0-9]{64}(:[a-z0-9]+)?$", re.IGNORECASE)
# Short hex prefix: at least 12 hex chars (standard Docker short ID length)
_DOCKER_SHORT_ID_PATTERN = re.compile(r"^[a-f0-9]{12,63}$", re.IGNORECASE)
def find_container_by_identifier(
identifier: str, containers: list[dict[str, Any]], *, strict: bool = False
) -> dict[str, Any] | None:
"""Find a container by ID or name with optional fuzzy matching.
Match priority:
1. Exact ID match
2. Exact name match (case-sensitive)
When strict=False (default), also tries:
3. Name starts with identifier (case-insensitive)
4. Name contains identifier as substring (case-insensitive)
When strict=True, only exact matches (1 & 2) are used.
Use strict=True for mutations to prevent targeting the wrong container.
"""
if not containers:
return None
# Priority 1 & 2: exact matches
for c in containers:
if c.get("id") == identifier:
return c
if identifier in c.get("names", []):
return c
# Strict mode: no fuzzy matching allowed
if strict:
return None
id_lower = identifier.lower()
# Priority 3: prefix match (more precise than substring)
for c in containers:
for name in c.get("names", []):
if name.lower().startswith(id_lower):
logger.debug(f"Prefix match: '{identifier}' -> '{name}'")
return c
# Priority 4: substring match (least precise)
for c in containers:
for name in c.get("names", []):
if id_lower in name.lower():
logger.debug(f"Substring match: '{identifier}' -> '{name}'")
return c
return None
def get_available_container_names(containers: list[dict[str, Any]]) -> list[str]:
    """Flatten every container's name list into one list, for error messages."""
    return [name for container in containers for name in container.get("names", [])]
async def _resolve_container_id(container_id: str, *, strict: bool = False) -> str:
    """Resolve a container name/identifier to its actual PrefixedID.

    Optimization: if the identifier is a full 64-char hex ID (with optional
    :suffix), skip the container list fetch entirely and use it directly.
    If it's a short hex prefix (12-63 chars), fetch the list and match by
    ID prefix. Only fetch the container list for name-based lookups.

    Args:
        container_id: Container name or ID to resolve
        strict: When True, only exact name/ID matches are allowed (no fuzzy).
            Use for mutations to prevent targeting the wrong container.

    Raises:
        ToolError: if the identifier is ambiguous (multiple short-ID prefix
            matches) or resolves to no container at all.
    """
    # Fast path: a full PrefixedID needs no lookup at all.
    if _DOCKER_ID_PATTERN.match(container_id):
        return container_id
    logger.info(f"Resolving container identifier '{container_id}' (strict={strict})")
    # skipCache: true — resolution must see containers created/renamed recently.
    list_query = """
    query ResolveContainerID {
      docker { containers(skipCache: true) { id names } }
    }
    """
    data = await make_graphql_request(list_query)
    containers = safe_get(data, "docker", "containers", default=[])
    # Short hex prefix: try ID-prefix matching before any name matching.
    if _DOCKER_SHORT_ID_PATTERN.match(container_id):
        id_lower = container_id.lower()
        matches: list[dict[str, Any]] = []
        for c in containers:
            cid = (c.get("id") or "").lower()
            # Also match against the ID with any ":suffix" stripped.
            if cid.startswith(id_lower) or cid.split(":")[0].startswith(id_lower):
                matches.append(c)
        if len(matches) == 1:
            actual_id = str(matches[0].get("id", ""))
            logger.info(f"Resolved short ID '{container_id}' -> '{actual_id}'")
            return actual_id
        if len(matches) > 1:
            # Ambiguity is an error: never guess which container was meant.
            candidate_ids = [str(c.get("id", "")) for c in matches[:5]]
            raise ToolError(
                f"Short container ID prefix '{container_id}' is ambiguous. "
                f"Matches: {', '.join(candidate_ids)}. Use a longer ID or exact name."
            )
        # Zero prefix matches: fall through to name-based resolution below.
    resolved = find_container_by_identifier(container_id, containers, strict=strict)
    if resolved:
        actual_id = str(resolved.get("id", ""))
        logger.info(f"Resolved '{container_id}' -> '{actual_id}'")
        return actual_id
    # Not found: build a helpful error listing available names (capped at 10).
    available = get_available_container_names(containers)
    if strict:
        msg = (
            f"Container '{container_id}' not found by exact match. "
            f"Mutations require an exact container name or full ID — "
            f"fuzzy/substring matching is not allowed for safety."
        )
    else:
        msg = f"Container '{container_id}' not found."
    if available:
        msg += f" Available: {', '.join(available[:10])}"
    raise ToolError(msg)
def register_docker_tool(mcp: FastMCP) -> None:
    """Register the unraid_docker tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_docker(
        action: DOCKER_ACTIONS,
        container_id: str | None = None,  # name, short ID, or full PrefixedID
        network_id: str | None = None,  # network ID or name, for network_details
        *,
        confirm: bool = False,  # reserved; DESTRUCTIVE_ACTIONS is currently empty
    ) -> dict[str, Any]:
        """Manage Docker containers and networks.
        Actions:
        list - List all containers
        details - Detailed info for a container (requires container_id)
        start - Start a container (requires container_id)
        stop - Stop a container (requires container_id)
        restart - Stop then start a container (requires container_id)
        networks - List Docker networks
        network_details - Details of a network (requires network_id)
        """
        # Validate action and required per-action arguments before any I/O.
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        if action in _ACTIONS_REQUIRING_CONTAINER_ID and not container_id:
            raise ToolError(f"container_id is required for '{action}' action")
        if action == "network_details" and not network_id:
            raise ToolError("network_id is required for 'network_details' action")
        with tool_error_handler("docker", action, logger):
            logger.info(f"Executing unraid_docker action={action}")
            # --- Read-only queries ---
            if action == "list":
                data = await make_graphql_request(QUERIES["list"])
                containers = safe_get(data, "docker", "containers", default=[])
                return {"containers": containers}
            if action == "details":
                # Resolve name -> ID first (skips list fetch if already an ID).
                # Fuzzy matching is allowed here: reads are safe to fuzzy-match.
                actual_id = await _resolve_container_id(container_id or "")
                data = await make_graphql_request(QUERIES["details"])
                containers = safe_get(data, "docker", "containers", default=[])
                # Match by resolved ID (exact match, no second list fetch needed)
                for c in containers:
                    if c.get("id") == actual_id:
                        return c
                raise ToolError(f"Container '{container_id}' not found in details response.")
            if action == "networks":
                data = await make_graphql_request(QUERIES["networks"])
                networks = safe_get(data, "docker", "networks", default=[])
                return {"networks": networks}
            if action == "network_details":
                data = await make_graphql_request(QUERIES["network_details"])
                all_networks = safe_get(data, "docker", "networks", default=[])
                # Filter client-side by network_id since the API returns all networks
                for net in all_networks:
                    if net.get("id") == network_id or net.get("name") == network_id:
                        return dict(net)
                raise ToolError(f"Network '{network_id}' not found.")
            # --- Mutations (strict matching: no fuzzy/substring) ---
            if action == "restart":
                # restart = stop then start, both against the same resolved ID.
                actual_id = await _resolve_container_id(container_id or "", strict=True)
                # Stop (idempotent: treat "already stopped" as success)
                stop_data = await make_graphql_request(
                    MUTATIONS["stop"],
                    {"id": actual_id},
                    operation_context={"operation": "stop"},
                )
                stop_was_idempotent = stop_data.get("idempotent_success", False)
                # Start (idempotent: treat "already running" as success)
                start_data = await make_graphql_request(
                    MUTATIONS["start"],
                    {"id": actual_id},
                    operation_context={"operation": "start"},
                )
                if start_data.get("idempotent_success"):
                    # No container payload on the idempotent path.
                    result = {}
                else:
                    result = safe_get(start_data, "docker", "start", default={})
                response: dict[str, Any] = {
                    "success": True,
                    "action": "restart",
                    "container": result,
                }
                if stop_was_idempotent:
                    response["note"] = "Container was already stopped before restart"
                return response
            # Single-container mutations (start, stop)
            if action in MUTATIONS:
                actual_id = await _resolve_container_id(container_id or "", strict=True)
                # operation_context lets the client layer recognize idempotent
                # failures ("already running"/"already stopped") for this op.
                op_context: dict[str, str] | None = (
                    {"operation": action} if action in ("start", "stop") else None
                )
                data = await make_graphql_request(
                    MUTATIONS[action],
                    {"id": actual_id},
                    operation_context=op_context,
                )
                # Handle idempotent success
                if data.get("idempotent_success"):
                    return {
                        "success": True,
                        "action": action,
                        "idempotent": True,
                        "message": f"Container already in desired state for '{action}'",
                    }
                # Mutation result lives under docker.<action> in the response.
                docker_data = data.get("docker") or {}
                field = action
                result_container = docker_data.get(field)
                return {
                    "success": True,
                    "action": action,
                    "container": result_container,
                }
            # Defensive: the branches above should cover every member of ALL_ACTIONS.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Docker tool registered successfully")

View File

@@ -1,275 +0,0 @@
"""Health monitoring and diagnostics.
Provides the `unraid_health` tool with 4 actions for system health checks,
connection testing, subscription diagnostics, and credential setup.
"""
import datetime
import time
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..config.settings import (
CREDENTIALS_ENV_PATH,
UNRAID_API_URL,
UNRAID_MCP_HOST,
UNRAID_MCP_PORT,
UNRAID_MCP_TRANSPORT,
VERSION,
)
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.setup import elicit_and_configure
from ..core.utils import safe_display_url
from ..subscriptions.utils import _analyze_subscription_status
# Canonical action set for the health tool.
ALL_ACTIONS = {"check", "test_connection", "diagnose", "setup"}
# Literal mirror of ALL_ACTIONS so MCP clients get schema-level validation.
HEALTH_ACTIONS = Literal["check", "test_connection", "diagnose", "setup"]
# Import-time guard: fail fast if the Literal drifts out of sync with the set.
if set(get_args(HEALTH_ACTIONS)) != ALL_ACTIONS:
    _missing = ALL_ACTIONS - set(get_args(HEALTH_ACTIONS))
    _extra = set(get_args(HEALTH_ACTIONS)) - ALL_ACTIONS
    raise RuntimeError(
        "HEALTH_ACTIONS and ALL_ACTIONS are out of sync. "
        f"Missing in HEALTH_ACTIONS: {_missing}; extra in HEALTH_ACTIONS: {_extra}"
    )
# Severity ordering: only upgrade, never downgrade
_SEVERITY = {"healthy": 0, "warning": 1, "degraded": 2, "unhealthy": 3}
def _server_info() -> dict[str, Any]:
    """Build the standard server metadata block embedded in health responses."""
    return dict(
        name="Unraid MCP Server",
        version=VERSION,
        transport=UNRAID_MCP_TRANSPORT,
        host=UNRAID_MCP_HOST,
        port=UNRAID_MCP_PORT,
    )
def register_health_tool(mcp: FastMCP) -> None:
    """Register the unraid_health tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_health(
        action: HEALTH_ACTIONS,
        ctx: Context | None = None,  # MCP context; used by the setup elicitation flow
    ) -> dict[str, Any] | str:
        """Monitor Unraid MCP server and system health.
        Actions:
        setup - Configure Unraid credentials via interactive elicitation
        check - Comprehensive health check (API latency, array, notifications, Docker)
        test_connection - Quick connectivity test (just checks { online })
        diagnose - Subscription system diagnostics
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        # "setup" is handled outside tool_error_handler and returns a plain
        # string (human-readable status), unlike the dict-returning actions.
        if action == "setup":
            configured = await elicit_and_configure(ctx)
            if configured:
                return (
                    "✅ Credentials configured successfully. You can now use all Unraid MCP tools."
                )
            # Elicitation unsupported or cancelled: fall back to manual instructions.
            return (
                f"⚠️ Credentials not configured.\n\n"
                f"Your MCP client may not support elicitation, or setup was cancelled.\n\n"
                f"**Manual setup** — create `{CREDENTIALS_ENV_PATH}` with:\n"
                f"```\n"
                f"UNRAID_API_URL=https://your-unraid-server:port\n"
                f"UNRAID_API_KEY=your-api-key\n"
                f"```\n\n"
                f"Then run any Unraid tool to connect."
            )
        with tool_error_handler("health", action, logger):
            logger.info(f"Executing unraid_health action={action}")
            if action == "test_connection":
                # Minimal round-trip: { online } plus wall-clock latency in ms.
                start = time.time()
                data = await make_graphql_request("query { online }")
                latency = round((time.time() - start) * 1000, 2)
                return {
                    "status": "connected",
                    "online": data.get("online"),
                    "latency_ms": latency,
                }
            if action == "check":
                return await _comprehensive_check()
            if action == "diagnose":
                return await _diagnose_subscriptions()
            # Defensive: the branches above should cover every member of ALL_ACTIONS.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Health tool registered successfully")
async def _comprehensive_check() -> dict[str, Any]:
    """Run comprehensive health check against the Unraid system.

    Issues one batched GraphQL query covering system info, array state,
    unread notifications, and Docker containers, then folds the results into
    a single response whose "status" is the worst severity observed.
    Never raises: all failures are reported as an "unhealthy" response.
    """
    start_time = time.time()
    health_severity = 0  # Track as int to prevent downgrade
    issues: list[str] = []

    def _escalate(level: str) -> None:
        # Raise overall severity to at least `level`; never lowers it.
        nonlocal health_severity
        health_severity = max(health_severity, _SEVERITY.get(level, 0))

    try:
        # One batched query keeps the check to a single round-trip;
        # skipCache on containers ensures fresh Docker state.
        query = """
        query ComprehensiveHealthCheck {
          info {
            machineId time
            versions { core { unraid } }
            os { uptime }
          }
          array { state }
          notifications {
            overview { unread { alert warning total } }
          }
          docker {
            containers(skipCache: true) { id state status }
          }
        }
        """
        data = await make_graphql_request(query)
        api_latency = round((time.time() - start_time) * 1000, 2)
        health_info: dict[str, Any] = {
            "status": "healthy",
            "timestamp": datetime.datetime.now(datetime.UTC).isoformat(),
            "api_latency_ms": api_latency,
            "server": _server_info(),
        }
        # Empty response: short-circuit as unhealthy.
        if not data:
            health_info["status"] = "unhealthy"
            health_info["issues"] = ["No response from Unraid API"]
            return health_info
        # System info — missing info degrades overall health.
        info = data.get("info") or {}
        if info:
            health_info["unraid_system"] = {
                "status": "connected",
                "url": safe_display_url(UNRAID_API_URL),
                "machine_id": info.get("machineId"),
                "version": ((info.get("versions") or {}).get("core") or {}).get("unraid"),
                "uptime": (info.get("os") or {}).get("uptime"),
            }
        else:
            _escalate("degraded")
            issues.append("Unable to retrieve system info")
        # Array — STARTED and STOPPED are both considered healthy states.
        array_info = data.get("array") or {}
        if array_info:
            state = array_info.get("state", "unknown")
            health_info["array_status"] = {
                "state": state,
                "healthy": state in ("STARTED", "STOPPED"),
            }
            if state not in ("STARTED", "STOPPED"):
                _escalate("warning")
                issues.append(f"Array in unexpected state: {state}")
        else:
            _escalate("warning")
            issues.append("Unable to retrieve array status")
        # Notifications — unread alerts escalate to warning.
        notifications = data.get("notifications") or {}
        if notifications and notifications.get("overview"):
            unread = notifications["overview"].get("unread") or {}
            alerts = unread.get("alert", 0)
            health_info["notifications"] = {
                "unread_total": unread.get("total", 0),
                "unread_alerts": alerts,
                "unread_warnings": unread.get("warning", 0),
            }
            if alerts > 0:
                _escalate("warning")
                issues.append(f"{alerts} unread alert(s)")
        # Docker — informational counts only; does not affect severity.
        docker = data.get("docker") or {}
        if docker and docker.get("containers"):
            containers = docker["containers"]
            health_info["docker_services"] = {
                "total": len(containers),
                "running": len([c for c in containers if c.get("state") == "running"]),
                "stopped": len([c for c in containers if c.get("state") == "exited"]),
            }
        # Latency assessment: >10s degrades, >5s warns.
        if api_latency > 10000:
            _escalate("degraded")
            issues.append(f"Very high API latency: {api_latency}ms")
        elif api_latency > 5000:
            _escalate("warning")
            issues.append(f"High API latency: {api_latency}ms")
        # Resolve final status from severity level
        severity_to_status = {v: k for k, v in _SEVERITY.items()}
        health_info["status"] = severity_to_status.get(health_severity, "healthy")
        if issues:
            health_info["issues"] = issues
        health_info["performance"] = {
            "api_response_time_ms": api_latency,
            "check_duration_ms": round((time.time() - start_time) * 1000, 2),
        }
        return health_info
    except Exception as e:
        # Intentionally broad: health checks must always return a result,
        # even on unexpected failures, so callers never get an unhandled exception.
        logger.error(f"Health check failed: {e}", exc_info=True)
        return {
            "status": "unhealthy",
            "timestamp": datetime.datetime.now(datetime.UTC).isoformat(),
            "error": str(e),
            "server": _server_info(),
        }
async def _diagnose_subscriptions() -> dict[str, Any]:
    """Import and run subscription diagnostics.

    The subscription modules are imported lazily here so the health tool
    still loads when the subscription subsystem is unavailable.

    Raises:
        ToolError: if the subscription modules cannot be imported or the
            diagnostics fail for any other reason.
    """
    try:
        from ..subscriptions.manager import subscription_manager
        from ..subscriptions.resources import ensure_subscriptions_started

        # Ensure the background subscription tasks exist before inspecting them.
        await ensure_subscriptions_started()
        status = await subscription_manager.get_subscription_status()
        error_count, connection_issues = _analyze_subscription_status(status)
        return {
            "timestamp": datetime.datetime.now(datetime.UTC).isoformat(),
            "environment": {
                "auto_start_enabled": subscription_manager.auto_start_enabled,
                "max_reconnect_attempts": subscription_manager.max_reconnect_attempts,
                "api_url_configured": bool(UNRAID_API_URL),
            },
            "subscriptions": status,
            "summary": {
                "total_configured": len(subscription_manager.subscription_configs),
                "active_count": len(subscription_manager.active_subscriptions),
                "with_data": len(subscription_manager.resource_data),
                "in_error_state": error_count,
                "connection_issues": connection_issues,
            },
        }
    except ImportError as e:
        raise ToolError("Subscription modules not available") from e
    except Exception as e:
        # Broad catch: wrap anything unexpected in a ToolError for the caller.
        raise ToolError(f"Failed to generate diagnostics: {e!s}") from e

View File

@@ -1,449 +0,0 @@
"""System information and server status queries.
Provides the `unraid_info` tool with 19 read-only actions for retrieving
system information, array status, network config, and server metadata.
"""
from typing import Any, Literal, get_args
from fastmcp import FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.utils import format_kb
# Pre-built queries keyed by action name
QUERIES: dict[str, str] = {
"overview": """
query GetSystemInfo {
info {
os { platform distro release codename kernel arch hostname logofile serial build uptime }
cpu { manufacturer brand vendor family model stepping revision voltage speed speedmin speedmax threads cores processors socket cache }
memory {
layout { bank type clockSpeed formFactor manufacturer partNum serialNum }
}
baseboard { manufacturer model version serial assetTag }
system { manufacturer model version serial uuid sku }
versions { core { unraid api kernel } packages { openssl node npm pm2 git nginx php docker } }
machineId
time
}
}
""",
"array": """
query GetArrayStatus {
array {
id
state
capacity {
kilobytes { free used total }
disks { free used total }
}
boot { id idx name device size status rotational temp numReads numWrites numErrors fsSize fsFree fsUsed exportable type warning critical fsType comment format transport color }
parities { id idx name device size status rotational temp numReads numWrites numErrors fsSize fsFree fsUsed exportable type warning critical fsType comment format transport color }
disks { id idx name device size status rotational temp numReads numWrites numErrors fsSize fsFree fsUsed exportable type warning critical fsType comment format transport color }
caches { id idx name device size status rotational temp numReads numWrites numErrors fsSize fsFree fsUsed exportable type warning critical fsType comment format transport color }
}
}
""",
"network": """
query GetNetworkInfo {
servers { id name status wanip lanip localurl remoteurl }
vars { id port portssl localTld useSsl }
}
""",
"registration": """
query GetRegistrationInfo {
registration {
id type
keyFile { location }
state expiration updateExpiration
}
}
""",
"connect": """
query GetConnectSettings {
connect { id dynamicRemoteAccess { enabledType runningType error } }
}
""",
"variables": """
query GetSelectiveUnraidVariables {
vars {
id version name timeZone comment security workgroup domain domainShort
hideDotFiles localMaster enableFruit useNtp domainLogin sysModel
sysFlashSlots useSsl port portssl localTld bindMgt useTelnet porttelnet
useSsh portssh startPage startArray shutdownTimeout
shareSmbEnabled shareNfsEnabled shareAfpEnabled shareCacheEnabled
shareAvahiEnabled safeMode startMode configValid configError joinStatus
deviceCount flashGuid flashProduct flashVendor mdState mdVersion
shareCount shareSmbCount shareNfsCount shareAfpCount shareMoverActive
}
}
""",
"metrics": """
query GetMetrics {
metrics { cpu { percentTotal } memory { total used free available buffcache percentTotal } }
}
""",
"services": """
query GetServices {
services { name online version }
}
""",
"display": """
query GetDisplay {
info { display { theme } }
}
""",
"config": """
query GetConfig {
config { valid error }
}
""",
"online": """
query GetOnline { online }
""",
"owner": """
query GetOwner {
owner { username avatar url }
}
""",
"settings": """
query GetSettings {
settings { unified { values } }
}
""",
"server": """
query GetServer {
info {
os { hostname uptime }
versions { core { unraid } }
machineId time
}
array { state }
online
}
""",
"servers": """
query GetServers {
servers { id name status wanip lanip localurl remoteurl }
}
""",
"flash": """
query GetFlash {
flash { id vendor product }
}
""",
"ups_devices": """
query GetUpsDevices {
upsDevices { id name model status battery { chargeLevel estimatedRuntime health } power { loadPercentage inputVoltage outputVoltage } }
}
""",
"ups_device": """
query GetUpsDevice($id: String!) {
upsDeviceById(id: $id) { id name model status battery { chargeLevel estimatedRuntime health } power { loadPercentage inputVoltage outputVoltage nominalPower currentPower } }
}
""",
"ups_config": """
query GetUpsConfig {
upsConfiguration { service upsCable upsType device batteryLevel minutes timeout killUps upsName }
}
""",
}
MUTATIONS: dict[str, str] = {}
DESTRUCTIVE_ACTIONS: set[str] = set()
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
INFO_ACTIONS = Literal[
"overview",
"array",
"network",
"registration",
"connect",
"variables",
"metrics",
"services",
"display",
"config",
"online",
"owner",
"settings",
"server",
"servers",
"flash",
"ups_devices",
"ups_device",
"ups_config",
]
# Import-time guard: fail fast if the INFO_ACTIONS Literal drifts out of sync
# with the QUERIES/MUTATIONS tables above (e.g. an action added to one but not
# the other). Raising at import keeps the mismatch from surfacing at call time.
if set(get_args(INFO_ACTIONS)) != ALL_ACTIONS:
    _missing = ALL_ACTIONS - set(get_args(INFO_ACTIONS))
    _extra = set(get_args(INFO_ACTIONS)) - ALL_ACTIONS
    raise RuntimeError(
        f"QUERIES keys and INFO_ACTIONS are out of sync. "
        f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
    )
def _process_system_info(raw_info: dict[str, Any]) -> dict[str, Any]:
"""Process raw system info into summary + details."""
summary: dict[str, Any] = {}
if raw_info.get("os"):
os_info = raw_info["os"]
summary["os"] = (
f"{os_info.get('distro') or 'unknown'} {os_info.get('release') or 'unknown'} "
f"({os_info.get('platform') or 'unknown'}, {os_info.get('arch') or 'unknown'})"
)
summary["hostname"] = os_info.get("hostname") or "unknown"
summary["uptime"] = os_info.get("uptime")
if raw_info.get("cpu"):
cpu = raw_info["cpu"]
summary["cpu"] = (
f"{cpu.get('manufacturer') or 'unknown'} {cpu.get('brand') or 'unknown'} "
f"({cpu.get('cores') or '?'} cores, {cpu.get('threads') or '?'} threads)"
)
if raw_info.get("memory") and raw_info["memory"].get("layout"):
mem_layout = raw_info["memory"]["layout"]
summary["memory_layout_details"] = []
for stick in mem_layout:
summary["memory_layout_details"].append(
f"Bank {stick.get('bank') or '?'}: Type {stick.get('type') or '?'}, "
f"Speed {stick.get('clockSpeed') or '?'}MHz, "
f"Manufacturer: {stick.get('manufacturer') or '?'}, "
f"Part: {stick.get('partNum') or '?'}"
)
summary["memory_summary"] = (
"Stick layout details retrieved. Overall total/used/free memory stats "
"are unavailable due to API limitations."
)
else:
summary["memory_summary"] = "Memory information not available."
return {"summary": summary, "details": raw_info}
def _analyze_disk_health(disks: list[dict[str, Any]]) -> dict[str, int]:
"""Analyze health status of disk arrays."""
counts = {
"healthy": 0,
"failed": 0,
"missing": 0,
"new": 0,
"warning": 0,
"critical": 0,
"unknown": 0,
}
for disk in disks:
status = disk.get("status", "").upper()
warning = disk.get("warning")
critical = disk.get("critical")
if status == "DISK_OK":
if critical:
counts["critical"] += 1
elif warning:
counts["warning"] += 1
else:
counts["healthy"] += 1
elif status in ("DISK_DSBL", "DISK_INVALID"):
counts["failed"] += 1
elif status == "DISK_NP":
counts["missing"] += 1
elif status == "DISK_NEW":
counts["new"] += 1
else:
counts["unknown"] += 1
return counts
def _process_array_status(raw: dict[str, Any]) -> dict[str, Any]:
    """Condense raw array GraphQL data into a summary alongside the details.

    The summary includes state, formatted capacity, disk counts, per-group
    health tallies, and an overall health grade; "details" is the raw payload.
    """
    summary: dict[str, Any] = {"state": raw.get("state")}

    kilobytes = (raw.get("capacity") or {}).get("kilobytes")
    if kilobytes:
        summary["capacity_total"] = format_kb(kilobytes.get("total"))
        summary["capacity_used"] = format_kb(kilobytes.get("used"))
        summary["capacity_free"] = format_kb(kilobytes.get("free"))

    summary["num_parity_disks"] = len(raw.get("parities", []))
    summary["num_data_disks"] = len(raw.get("disks", []))
    summary["num_cache_pools"] = len(raw.get("caches", []))

    # Tally health per disk group; absent/empty groups are skipped.
    health_summary: dict[str, Any] = {}
    for group_key, label in (
        ("parities", "parity_health"),
        ("disks", "data_health"),
        ("caches", "cache_health"),
    ):
        group = raw.get(group_key)
        if group:
            health_summary[label] = _analyze_disk_health(group)

    totals = {
        field: sum(h.get(field, 0) for h in health_summary.values())
        for field in ("failed", "critical", "missing", "warning")
    }
    # Grade from worst to best: failures/critical flags trump missing disks,
    # which trump SMART warnings.
    if totals["failed"] > 0 or totals["critical"] > 0:
        overall = "CRITICAL"
    elif totals["missing"] > 0:
        overall = "DEGRADED"
    elif totals["warning"] > 0:
        overall = "WARNING"
    else:
        overall = "HEALTHY"
    summary["overall_health"] = overall
    summary["health_summary"] = health_summary

    return {"summary": summary, "details": raw}
def register_info_tool(mcp: FastMCP) -> None:
    """Register the unraid_info tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_info(
        action: INFO_ACTIONS,
        device_id: str | None = None,
    ) -> dict[str, Any]:
        """Query Unraid system information.

        Actions:
            overview     - OS, CPU, memory, baseboard, versions
            array        - Array state, capacity, disk health
            network      - Access URLs, interfaces
            registration - License type, state, expiration
            connect      - Unraid Connect settings
            variables    - System variables and configuration
            metrics      - CPU and memory utilization
            services     - Running services
            display      - Theme settings
            config       - Configuration validity
            online       - Server online status
            owner        - Server owner info
            settings     - All unified settings
            server       - Quick server summary
            servers      - Connected servers list
            flash        - Flash drive info
            ups_devices  - List UPS devices
            ups_device   - Single UPS device (requires device_id)
            ups_config   - UPS configuration
        """
        # Defense-in-depth: the Literal already constrains `action` at the
        # schema level, but clients may bypass schema validation.
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        if action == "ups_device" and not device_id:
            raise ToolError("device_id is required for ups_device action")
        # connect is not available on all Unraid API versions
        if action == "connect":
            raise ToolError(
                "The 'connect' query is not available on this Unraid API version. "
                "Use the 'settings' action for API and SSO configuration."
            )
        query = QUERIES[action]
        variables: dict[str, Any] | None = None
        if action == "ups_device":
            variables = {"id": device_id}
        # Lookup tables for common response patterns
        # Simple dict actions: action -> GraphQL response key
        dict_actions: dict[str, str] = {
            "registration": "registration",
            "variables": "vars",
            "metrics": "metrics",
            "config": "config",
            "owner": "owner",
            "flash": "flash",
            "ups_device": "upsDeviceById",
            "ups_config": "upsConfiguration",
        }
        # List-wrapped actions: action -> (GraphQL response key, output key)
        list_actions: dict[str, tuple[str, str]] = {
            "services": ("services", "services"),
            "servers": ("servers", "servers"),
            "ups_devices": ("upsDevices", "ups_devices"),
        }
        with tool_error_handler("info", action, logger):
            logger.info(f"Executing unraid_info action={action}")
            data = await make_graphql_request(query, variables)
            # Special-case actions with custom processing.
            # These are checked before the generic dict/list lookup tables,
            # so ordering here matters.
            if action == "overview":
                raw = data.get("info") or {}
                if not raw:
                    raise ToolError("No system info returned from Unraid API")
                return _process_system_info(raw)
            if action == "array":
                raw = data.get("array") or {}
                if not raw:
                    raise ToolError("No array information returned from Unraid API")
                return _process_array_status(raw)
            if action == "display":
                info = data.get("info") or {}
                return dict(info.get("display") or {})
            if action == "online":
                return {"online": data.get("online")}
            if action == "settings":
                settings = data.get("settings") or {}
                if not settings:
                    raise ToolError(
                        "No settings data returned from Unraid API. Check API permissions."
                    )
                if not settings.get("unified"):
                    logger.warning(f"Settings returned unexpected structure: {settings.keys()}")
                    raise ToolError(
                        f"Unexpected settings structure. Expected 'unified' key, got: {list(settings.keys())}"
                    )
                values = settings["unified"].get("values") or {}
                # `values` is expected to be a dict; wrap anything else so the
                # caller still gets a dict-shaped response.
                return dict(values) if isinstance(values, dict) else {"raw": values}
            if action == "server":
                # The "server" query already selects exactly the summary shape
                # we want, so the payload is returned as-is.
                return data
            if action == "network":
                servers_data = data.get("servers") or []
                vars_data = data.get("vars") or {}
                # Flatten per-server LAN/WAN addresses into one access-URL list.
                access_urls = []
                for srv in servers_data:
                    if srv.get("lanip"):
                        access_urls.append(
                            {"type": "LAN", "ipv4": srv["lanip"], "url": srv.get("localurl")}
                        )
                    if srv.get("wanip"):
                        access_urls.append(
                            {"type": "WAN", "ipv4": srv["wanip"], "url": srv.get("remoteurl")}
                        )
                return {
                    "accessUrls": access_urls,
                    "httpPort": vars_data.get("port"),
                    "httpsPort": vars_data.get("portssl"),
                    "localTld": vars_data.get("localTld"),
                    "useSsl": vars_data.get("useSsl"),
                }
            # Simple dict-returning actions
            if action in dict_actions:
                return dict(data.get(dict_actions[action]) or {})
            # List-wrapped actions
            if action in list_actions:
                response_key, output_key = list_actions[action]
                items = data.get(response_key) or []
                normalized_items = list(items) if isinstance(items, list) else []
                return {output_key: normalized_items}
            # Unreachable if ALL_ACTIONS and the dispatch above stay in sync.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Info tool registered successfully")

View File

@@ -1,202 +0,0 @@
"""API key management.
Provides the `unraid_keys` tool with 5 actions for listing, viewing,
creating, updating, and deleting API keys.
"""
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.guards import gate_destructive_action
QUERIES: dict[str, str] = {
"list": """
query ListApiKeys {
apiKeys { id name roles permissions { resource actions } createdAt }
}
""",
"get": """
query GetApiKey($id: PrefixedID!) {
apiKey(id: $id) { id name roles permissions { resource actions } createdAt }
}
""",
}
MUTATIONS: dict[str, str] = {
"create": """
mutation CreateApiKey($input: CreateApiKeyInput!) {
apiKey { create(input: $input) { id name key roles } }
}
""",
"update": """
mutation UpdateApiKey($input: UpdateApiKeyInput!) {
apiKey { update(input: $input) { id name roles } }
}
""",
"delete": """
mutation DeleteApiKey($input: DeleteApiKeyInput!) {
apiKey { delete(input: $input) }
}
""",
"add_role": """
mutation AddRole($input: AddRoleForApiKeyInput!) {
apiKey { addRole(input: $input) }
}
""",
"remove_role": """
mutation RemoveRole($input: RemoveRoleFromApiKeyInput!) {
apiKey { removeRole(input: $input) }
}
""",
}
DESTRUCTIVE_ACTIONS = {"delete"}
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
KEY_ACTIONS = Literal[
"add_role",
"create",
"delete",
"get",
"list",
"remove_role",
"update",
]
# Import-time guard: fail fast if the KEY_ACTIONS Literal drifts out of sync
# with the QUERIES/MUTATIONS tables above.
if set(get_args(KEY_ACTIONS)) != ALL_ACTIONS:
    _missing = ALL_ACTIONS - set(get_args(KEY_ACTIONS))
    _extra = set(get_args(KEY_ACTIONS)) - ALL_ACTIONS
    raise RuntimeError(
        f"KEY_ACTIONS and ALL_ACTIONS are out of sync. "
        f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
    )
def register_keys_tool(mcp: FastMCP) -> None:
    """Register the unraid_keys tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_keys(
        action: KEY_ACTIONS,
        ctx: Context | None = None,
        confirm: bool = False,
        key_id: str | None = None,
        name: str | None = None,
        roles: list[str] | None = None,
        permissions: list[str] | None = None,
    ) -> dict[str, Any]:
        """Manage Unraid API keys.

        Actions:
            list        - List all API keys
            get         - Get a specific API key (requires key_id)
            create      - Create a new API key (requires name; optional roles, permissions)
            update      - Update an API key (requires key_id; optional name, roles)
            delete      - Delete API keys (requires key_id, confirm=True)
            add_role    - Add a role to an API key (requires key_id and roles)
            remove_role - Remove a role from an API key (requires key_id and roles)
        """
        # Defense-in-depth: the Literal already constrains `action` at the
        # schema level, but clients may bypass schema validation.
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        # Only 'delete' is destructive; for all other actions this gate is a no-op.
        await gate_destructive_action(
            ctx,
            action,
            DESTRUCTIVE_ACTIONS,
            confirm,
            f"Delete API key **{key_id}**. Any clients using this key will lose access.",
        )
        with tool_error_handler("keys", action, logger):
            logger.info(f"Executing unraid_keys action={action}")
            if action == "list":
                data = await make_graphql_request(QUERIES["list"])
                keys = data.get("apiKeys", [])
                # Normalize unexpected non-list payloads to an empty list.
                return {"keys": list(keys) if isinstance(keys, list) else []}
            if action == "get":
                if not key_id:
                    raise ToolError("key_id is required for 'get' action")
                data = await make_graphql_request(QUERIES["get"], {"id": key_id})
                return dict(data.get("apiKey") or {})
            if action == "create":
                if not name:
                    raise ToolError("name is required for 'create' action")
                # roles/permissions are only sent when explicitly provided, so
                # the server applies its own defaults otherwise.
                input_data: dict[str, Any] = {"name": name}
                if roles is not None:
                    input_data["roles"] = roles
                if permissions is not None:
                    input_data["permissions"] = permissions
                data = await make_graphql_request(MUTATIONS["create"], {"input": input_data})
                created_key = (data.get("apiKey") or {}).get("create")
                if not created_key:
                    raise ToolError("Failed to create API key: no data returned from server")
                return {"success": True, "key": created_key}
            if action == "update":
                if not key_id:
                    raise ToolError("key_id is required for 'update' action")
                input_data: dict[str, Any] = {"id": key_id}
                if name:
                    input_data["name"] = name
                if roles is not None:
                    input_data["roles"] = roles
                data = await make_graphql_request(MUTATIONS["update"], {"input": input_data})
                updated_key = (data.get("apiKey") or {}).get("update")
                if not updated_key:
                    raise ToolError("Failed to update API key: no data returned from server")
                return {"success": True, "key": updated_key}
            if action == "delete":
                if not key_id:
                    raise ToolError("key_id is required for 'delete' action")
                # The delete mutation takes a list of ids; we delete one at a time.
                data = await make_graphql_request(MUTATIONS["delete"], {"input": {"ids": [key_id]}})
                result = (data.get("apiKey") or {}).get("delete")
                if not result:
                    raise ToolError(
                        f"Failed to delete API key '{key_id}': no confirmation from server"
                    )
                return {
                    "success": True,
                    "message": f"API key '{key_id}' deleted",
                }
            if action == "add_role":
                if not key_id:
                    raise ToolError("key_id is required for 'add_role' action")
                if not roles or len(roles) == 0:
                    raise ToolError(
                        "role is required for 'add_role' action (pass as roles=['ROLE_NAME'])"
                    )
                # The mutation accepts a single role, so only roles[0] is used.
                data = await make_graphql_request(
                    MUTATIONS["add_role"],
                    {"input": {"apiKeyId": key_id, "role": roles[0]}},
                )
                return {"success": True, "message": f"Role '{roles[0]}' added to key '{key_id}'"}
            if action == "remove_role":
                if not key_id:
                    raise ToolError("key_id is required for 'remove_role' action")
                if not roles or len(roles) == 0:
                    raise ToolError(
                        "role is required for 'remove_role' action (pass as roles=['ROLE_NAME'])"
                    )
                # The mutation accepts a single role, so only roles[0] is used.
                data = await make_graphql_request(
                    MUTATIONS["remove_role"],
                    {"input": {"apiKeyId": key_id, "role": roles[0]}},
                )
                return {
                    "success": True,
                    "message": f"Role '{roles[0]}' removed from key '{key_id}'",
                }
            # Unreachable if ALL_ACTIONS and the dispatch above stay in sync.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Keys tool registered successfully")

View File

@@ -1,142 +0,0 @@
"""Real-time subscription snapshot tool.
Provides the `unraid_live` tool with 11 actions — one per GraphQL
subscription. Each action opens a transient WebSocket, receives one event
(or collects events for `collect_for` seconds), then closes.
Use `subscribe_once` actions for current-state reads (cpu, memory, array_state).
Use `subscribe_collect` actions for event streams (notification_feed, log_tail).
"""
import os
from typing import Any, Literal, get_args
from fastmcp import FastMCP
from ..config.logging import logger
from ..core.exceptions import ToolError, tool_error_handler
from ..subscriptions.queries import COLLECT_ACTIONS, SNAPSHOT_ACTIONS
from ..subscriptions.snapshot import subscribe_collect, subscribe_once
_ALLOWED_LOG_PREFIXES = ("/var/log/", "/boot/logs/", "/mnt/")
ALL_LIVE_ACTIONS = set(SNAPSHOT_ACTIONS) | set(COLLECT_ACTIONS)
LIVE_ACTIONS = Literal[
"array_state",
"cpu",
"cpu_telemetry",
"log_tail",
"memory",
"notification_feed",
"notifications_overview",
"owner",
"parity_progress",
"server_status",
"ups_status",
]
# Import-time guard: fail fast if the LIVE_ACTIONS Literal drifts out of sync
# with the SNAPSHOT_ACTIONS/COLLECT_ACTIONS tables.
if set(get_args(LIVE_ACTIONS)) != ALL_LIVE_ACTIONS:
    _missing = ALL_LIVE_ACTIONS - set(get_args(LIVE_ACTIONS))
    _extra = set(get_args(LIVE_ACTIONS)) - ALL_LIVE_ACTIONS
    raise RuntimeError(
        f"LIVE_ACTIONS and ALL_LIVE_ACTIONS are out of sync. "
        f"Missing: {_missing or 'none'}. Extra: {_extra or 'none'}"
    )
def register_live_tool(mcp: FastMCP) -> None:
    """Register the unraid_live tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_live(
        action: LIVE_ACTIONS,
        path: str | None = None,
        collect_for: float = 5.0,
        timeout: float = 10.0,  # noqa: ASYNC109
    ) -> dict[str, Any]:
        """Get real-time data from Unraid via WebSocket subscriptions.

        Each action opens a transient WebSocket, receives data, then closes.

        Snapshot actions (return current state):
            cpu                    - Real-time CPU utilization (all cores)
            memory                 - Real-time memory and swap utilization
            cpu_telemetry          - CPU power draw and temperature per package
            array_state            - Live array state and parity status
            parity_progress        - Live parity check progress
            ups_status             - Real-time UPS battery and power state
            notifications_overview - Live notification counts by severity
            owner                  - Live owner info
            server_status          - Live server connection state

        Collection actions (collect events for `collect_for` seconds):
            notification_feed - Collect new notification events (default: 5s window)
            log_tail          - Tail a log file (requires path; default: 5s window)

        Parameters:
            path        - Log file path for log_tail action (required)
            collect_for - Seconds to collect events for collect actions (default: 5.0)
            timeout     - WebSocket connection/handshake timeout in seconds (default: 10.0)
        """
        # Defense-in-depth: the Literal already constrains `action` at the
        # schema level, but clients may bypass schema validation.
        if action not in ALL_LIVE_ACTIONS:
            raise ToolError(
                f"Invalid action '{action}'. Must be one of: {sorted(ALL_LIVE_ACTIONS)}"
            )
        # Validate log_tail path before entering the error handler context.
        if action == "log_tail":
            if not path:
                raise ToolError("path is required for 'log_tail' action")
            # Resolve to prevent path traversal attacks (same as storage.py).
            # Using os.path.realpath instead of anyio.Path.resolve() because the
            # async variant blocks on NFS-mounted paths under /mnt/ (Perf-AI-1).
            normalized = os.path.realpath(path)  # noqa: ASYNC240
            if not any(normalized.startswith(p) for p in _ALLOWED_LOG_PREFIXES):
                raise ToolError(
                    f"path must start with one of: {', '.join(_ALLOWED_LOG_PREFIXES)}. Got: {path!r}"
                )
            # Use the resolved path from here on so the subscription cannot
            # follow symlinks outside the allow-list.
            path = normalized
        with tool_error_handler("live", action, logger):
            logger.info(f"Executing unraid_live action={action} timeout={timeout}")
            if action in SNAPSHOT_ACTIONS:
                # One-shot read: first event received is the current state.
                data = await subscribe_once(SNAPSHOT_ACTIONS[action], timeout=timeout)
                return {"success": True, "action": action, "data": data}
            # Collect actions
            if action == "log_tail":
                events = await subscribe_collect(
                    COLLECT_ACTIONS["log_tail"],
                    variables={"path": path},
                    collect_for=collect_for,
                    timeout=timeout,
                )
                return {
                    "success": True,
                    "action": action,
                    "path": path,
                    "collect_for": collect_for,
                    "event_count": len(events),
                    "events": events,
                }
            if action == "notification_feed":
                events = await subscribe_collect(
                    COLLECT_ACTIONS["notification_feed"],
                    collect_for=collect_for,
                    timeout=timeout,
                )
                return {
                    "success": True,
                    "action": action,
                    "collect_for": collect_for,
                    "event_count": len(events),
                    "events": events,
                }
            # Unreachable if ALL_LIVE_ACTIONS and the dispatch above stay in sync.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Live tool registered successfully")

View File

@@ -1,311 +0,0 @@
"""Notification management.
Provides the `unraid_notifications` tool with 13 actions for viewing,
creating, archiving, and deleting system notifications.
"""
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.guards import gate_destructive_action
QUERIES: dict[str, str] = {
"overview": """
query GetNotificationsOverview {
notifications {
overview {
unread { info warning alert total }
archive { info warning alert total }
}
}
}
""",
"list": """
query ListNotifications($filter: NotificationFilter!) {
notifications {
list(filter: $filter) {
id title subject description importance link type timestamp formattedTimestamp
}
}
}
""",
}
MUTATIONS: dict[str, str] = {
"create": """
mutation CreateNotification($input: NotificationData!) {
createNotification(input: $input) { id title importance }
}
""",
"archive": """
mutation ArchiveNotification($id: PrefixedID!) {
archiveNotification(id: $id) { id title importance }
}
""",
"unread": """
mutation UnreadNotification($id: PrefixedID!) {
unreadNotification(id: $id) { id title importance }
}
""",
"delete": """
mutation DeleteNotification($id: PrefixedID!, $type: NotificationType!) {
deleteNotification(id: $id, type: $type) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"delete_archived": """
mutation DeleteArchivedNotifications {
deleteArchivedNotifications {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"archive_all": """
mutation ArchiveAllNotifications($importance: NotificationImportance) {
archiveAll(importance: $importance) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"archive_many": """
mutation ArchiveNotifications($ids: [PrefixedID!]!) {
archiveNotifications(ids: $ids) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"unarchive_many": """
mutation UnarchiveNotifications($ids: [PrefixedID!]!) {
unarchiveNotifications(ids: $ids) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"unarchive_all": """
mutation UnarchiveAll($importance: NotificationImportance) {
unarchiveAll(importance: $importance) {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
"recalculate": """
mutation RecalculateOverview {
recalculateOverview {
unread { info warning alert total }
archive { info warning alert total }
}
}
""",
}
DESTRUCTIVE_ACTIONS = {"delete", "delete_archived"}
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
_VALID_IMPORTANCE = {"ALERT", "WARNING", "INFO"}
NOTIFICATION_ACTIONS = Literal[
"overview",
"list",
"create",
"archive",
"unread",
"delete",
"delete_archived",
"archive_all",
"archive_many",
"unarchive_many",
"unarchive_all",
"recalculate",
]
# Import-time guard: fail fast if the NOTIFICATION_ACTIONS Literal drifts out
# of sync with the QUERIES/MUTATIONS tables above.
if set(get_args(NOTIFICATION_ACTIONS)) != ALL_ACTIONS:
    _missing = ALL_ACTIONS - set(get_args(NOTIFICATION_ACTIONS))
    _extra = set(get_args(NOTIFICATION_ACTIONS)) - ALL_ACTIONS
    raise RuntimeError(
        f"NOTIFICATION_ACTIONS and ALL_ACTIONS are out of sync. "
        f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
    )
def register_notifications_tool(mcp: FastMCP) -> None:
    """Register the unraid_notifications tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_notifications(
        action: NOTIFICATION_ACTIONS,
        ctx: Context | None = None,
        confirm: bool = False,
        notification_id: str | None = None,
        notification_ids: list[str] | None = None,
        notification_type: str | None = None,
        importance: str | None = None,
        offset: int = 0,
        limit: int = 20,
        list_type: str = "UNREAD",
        title: str | None = None,
        subject: str | None = None,
        description: str | None = None,
    ) -> dict[str, Any]:
        """Manage Unraid system notifications.

        Actions:
            overview        - Notification counts by severity (unread/archive)
            list            - List notifications with filtering (list_type=UNREAD/ARCHIVE, importance=INFO/WARNING/ALERT)
            create          - Create notification (requires title, subject, description, importance)
            archive         - Archive a notification (requires notification_id)
            unread          - Mark notification as unread (requires notification_id)
            delete          - Delete a notification (requires notification_id, notification_type, confirm=True)
            delete_archived - Delete all archived notifications (requires confirm=True)
            archive_all     - Archive all notifications (optional importance filter)
            archive_many    - Archive multiple notifications by ID (requires notification_ids)
            unarchive_many  - Move notifications back to unread (requires notification_ids)
            unarchive_all   - Move all archived notifications to unread (optional importance filter)
            recalculate     - Recompute overview counts from disk
        """
        # Defense-in-depth: the Literal already constrains `action` at the
        # schema level, but clients may bypass schema validation.
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        # Only delete/delete_archived are destructive; other actions pass through.
        await gate_destructive_action(
            ctx,
            action,
            DESTRUCTIVE_ACTIONS,
            confirm,
            {
                "delete": f"Delete notification **{notification_id}** permanently. This cannot be undone.",
                "delete_archived": "Delete ALL archived notifications permanently. This cannot be undone.",
            },
        )
        # Validate enum parameters before dispatching to GraphQL (SEC-M04).
        # Invalid values waste a rate-limited request and may leak schema details in errors.
        valid_list_types = frozenset({"UNREAD", "ARCHIVE"})
        valid_importance = frozenset({"INFO", "WARNING", "ALERT"})
        valid_notif_types = frozenset({"UNREAD", "ARCHIVE"})
        if list_type.upper() not in valid_list_types:
            raise ToolError(
                f"Invalid list_type '{list_type}'. Must be one of: {sorted(valid_list_types)}"
            )
        if importance is not None and importance.upper() not in valid_importance:
            raise ToolError(
                f"Invalid importance '{importance}'. Must be one of: {sorted(valid_importance)}"
            )
        if notification_type is not None and notification_type.upper() not in valid_notif_types:
            raise ToolError(
                f"Invalid notification_type '{notification_type}'. "
                f"Must be one of: {sorted(valid_notif_types)}"
            )
        with tool_error_handler("notifications", action, logger):
            logger.info(f"Executing unraid_notifications action={action}")
            if action == "overview":
                data = await make_graphql_request(QUERIES["overview"])
                notifications = data.get("notifications") or {}
                return dict(notifications.get("overview") or {})
            if action == "list":
                # Enum-like filter values are upper-cased to match the GraphQL schema.
                filter_vars: dict[str, Any] = {
                    "type": list_type.upper(),
                    "offset": offset,
                    "limit": limit,
                }
                if importance:
                    filter_vars["importance"] = importance.upper()
                data = await make_graphql_request(QUERIES["list"], {"filter": filter_vars})
                notifications = data.get("notifications", {})
                return {"notifications": notifications.get("list", [])}
            if action == "create":
                if title is None or subject is None or description is None or importance is None:
                    raise ToolError("create requires title, subject, description, and importance")
                if importance.upper() not in _VALID_IMPORTANCE:
                    raise ToolError(
                        f"importance must be one of: {', '.join(sorted(_VALID_IMPORTANCE))}. "
                        f"Got: '{importance}'"
                    )
                # Length caps bound what gets persisted in the notification store.
                if len(title) > 200:
                    raise ToolError(f"title must be at most 200 characters (got {len(title)})")
                if len(subject) > 500:
                    raise ToolError(f"subject must be at most 500 characters (got {len(subject)})")
                if len(description) > 2000:
                    raise ToolError(
                        f"description must be at most 2000 characters (got {len(description)})"
                    )
                input_data = {
                    "title": title,
                    "subject": subject,
                    "description": description,
                    "importance": importance.upper(),
                }
                data = await make_graphql_request(MUTATIONS["create"], {"input": input_data})
                notification = data.get("createNotification")
                if notification is None:
                    raise ToolError("Notification creation failed: server returned no data")
                return {"success": True, "notification": notification}
            if action in ("archive", "unread"):
                # Both mutations share the same single-id variable shape.
                if not notification_id:
                    raise ToolError(f"notification_id is required for '{action}' action")
                data = await make_graphql_request(MUTATIONS[action], {"id": notification_id})
                return {"success": True, "action": action, "data": data}
            if action == "delete":
                if not notification_id or not notification_type:
                    raise ToolError("delete requires notification_id and notification_type")
                _del_vars = {"id": notification_id, "type": notification_type.upper()}
                data = await make_graphql_request(MUTATIONS["delete"], _del_vars)
                return {"success": True, "action": "delete", "data": data}
            if action == "delete_archived":
                data = await make_graphql_request(MUTATIONS["delete_archived"])
                return {"success": True, "action": "delete_archived", "data": data}
            if action == "archive_all":
                # importance is an optional filter; None archives everything.
                variables: dict[str, Any] | None = None
                if importance:
                    variables = {"importance": importance.upper()}
                data = await make_graphql_request(MUTATIONS["archive_all"], variables)
                return {"success": True, "action": "archive_all", "data": data}
            if action == "archive_many":
                if not notification_ids:
                    raise ToolError("notification_ids is required for 'archive_many' action")
                data = await make_graphql_request(
                    MUTATIONS["archive_many"], {"ids": notification_ids}
                )
                return {"success": True, "action": "archive_many", "data": data}
            if action == "unarchive_many":
                if not notification_ids:
                    raise ToolError("notification_ids is required for 'unarchive_many' action")
                data = await make_graphql_request(
                    MUTATIONS["unarchive_many"], {"ids": notification_ids}
                )
                return {"success": True, "action": "unarchive_many", "data": data}
            if action == "unarchive_all":
                # importance is an optional filter; None unarchives everything.
                vars_: dict[str, Any] | None = None
                if importance:
                    vars_ = {"importance": importance.upper()}
                data = await make_graphql_request(MUTATIONS["unarchive_all"], vars_)
                return {"success": True, "action": "unarchive_all", "data": data}
            if action == "recalculate":
                data = await make_graphql_request(MUTATIONS["recalculate"])
                return {"success": True, "action": "recalculate", "data": data}
            # Unreachable if ALL_ACTIONS and the dispatch above stay in sync.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Notifications tool registered successfully")

View File

@@ -1,115 +0,0 @@
"""OIDC/SSO provider management and session validation.
Provides the `unraid_oidc` tool with 5 read-only actions for querying
OIDC provider configuration and validating sessions.
"""
from typing import Any, Literal, get_args
from fastmcp import FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
QUERIES: dict[str, str] = {
"providers": """
query GetOidcProviders {
oidcProviders {
id name clientId issuer authorizationEndpoint tokenEndpoint jwksUri
scopes authorizationRules { claim operator value }
authorizationRuleMode buttonText buttonIcon buttonVariant buttonStyle
}
}
""",
"provider": """
query GetOidcProvider($id: PrefixedID!) {
oidcProvider(id: $id) {
id name clientId issuer scopes
authorizationRules { claim operator value }
authorizationRuleMode buttonText buttonIcon
}
}
""",
"configuration": """
query GetOidcConfiguration {
oidcConfiguration {
providers { id name clientId scopes }
defaultAllowedOrigins
}
}
""",
"public_providers": """
query GetPublicOidcProviders {
publicOidcProviders { id name buttonText buttonIcon buttonVariant buttonStyle }
}
""",
"validate_session": """
query ValidateOidcSession($token: String!) {
validateOidcSession(token: $token) { valid username }
}
""",
}
ALL_ACTIONS = set(QUERIES)
OIDC_ACTIONS = Literal[
"configuration",
"provider",
"providers",
"public_providers",
"validate_session",
]
if set(get_args(OIDC_ACTIONS)) != ALL_ACTIONS:
_missing = ALL_ACTIONS - set(get_args(OIDC_ACTIONS))
_extra = set(get_args(OIDC_ACTIONS)) - ALL_ACTIONS
raise RuntimeError(
f"OIDC_ACTIONS and ALL_ACTIONS are out of sync. "
f"Missing: {_missing or 'none'}. Extra: {_extra or 'none'}"
)
def register_oidc_tool(mcp: FastMCP) -> None:
    """Register the unraid_oidc tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_oidc(
        action: OIDC_ACTIONS,
        provider_id: str | None = None,
        token: str | None = None,
    ) -> dict[str, Any]:
        """Query Unraid OIDC/SSO provider configuration and validate sessions.

        Actions:
            providers        - List all configured OIDC providers (admin only)
            provider         - Get a specific OIDC provider by ID (requires provider_id)
            configuration    - Get full OIDC configuration including default origins (admin only)
            public_providers - Get public OIDC provider info for login buttons (no auth)
            validate_session - Validate an OIDC session token (requires token)
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        if action == "provider" and not provider_id:
            raise ToolError("provider_id is required for 'provider' action")
        if action == "validate_session" and not token:
            raise ToolError("token is required for 'validate_session' action")
        with tool_error_handler("oidc", action, logger):
            logger.info(f"Executing unraid_oidc action={action}")
            # Only two actions carry GraphQL variables; the rest are bare queries.
            query_vars: dict[str, Any] | None = None
            if action == "provider":
                query_vars = {"id": provider_id}
            elif action == "validate_session":
                query_vars = {"token": token}
            if query_vars is None:
                data = await make_graphql_request(QUERIES[action])
            else:
                data = await make_graphql_request(QUERIES[action], query_vars)
            return {"success": True, "action": action, "data": data}

    logger.info("OIDC tool registered successfully")

View File

@@ -1,110 +0,0 @@
"""Plugin management for the Unraid API.
Provides the `unraid_plugins` tool with 3 actions: list, add, remove.
"""
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.guards import gate_destructive_action
QUERIES: dict[str, str] = {
"list": """
query ListPlugins {
plugins { name version hasApiModule hasCliModule }
}
""",
}
MUTATIONS: dict[str, str] = {
"add": """
mutation AddPlugin($input: PluginManagementInput!) {
addPlugin(input: $input)
}
""",
"remove": """
mutation RemovePlugin($input: PluginManagementInput!) {
removePlugin(input: $input)
}
""",
}
DESTRUCTIVE_ACTIONS = {"remove"}
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
PLUGIN_ACTIONS = Literal["add", "list", "remove"]
if set(get_args(PLUGIN_ACTIONS)) != ALL_ACTIONS:
_missing = ALL_ACTIONS - set(get_args(PLUGIN_ACTIONS))
_extra = set(get_args(PLUGIN_ACTIONS)) - ALL_ACTIONS
raise RuntimeError(
f"PLUGIN_ACTIONS and ALL_ACTIONS are out of sync. "
f"Missing: {_missing or 'none'}. Extra: {_extra or 'none'}"
)
def register_plugins_tool(mcp: FastMCP) -> None:
    """Register the unraid_plugins tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_plugins(
        action: PLUGIN_ACTIONS,
        ctx: Context | None = None,
        confirm: bool = False,
        names: list[str] | None = None,
        bundled: bool = False,
        restart: bool = True,
    ) -> dict[str, Any]:
        """Manage Unraid API plugins.

        Actions:
            list   - List all installed plugins with version and module info
            add    - Install one or more plugins (requires names: list of package names)
            remove - Remove one or more plugins (requires names, confirm=True)

        Parameters:
            names   - List of plugin package names (required for add/remove)
            bundled - Whether plugins are bundled (default: False)
            restart - Whether to auto-restart API after operation (default: True)
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        # 'remove' is destructive: either prompt the client for confirmation
        # or require confirm=True; read-only 'list' passes straight through.
        await gate_destructive_action(
            ctx,
            action,
            DESTRUCTIVE_ACTIONS,
            confirm,
            f"Remove plugin(s) **{names}** from the Unraid API. This cannot be undone without re-installing.",
        )
        with tool_error_handler("plugins", action, logger):
            logger.info(f"Executing unraid_plugins action={action}")
            if action == "list":
                data = await make_graphql_request(QUERIES["list"])
                return {"success": True, "action": action, "data": data}
            if action in ("add", "remove"):
                if not names:
                    raise ToolError(f"names is required for '{action}' action")
                # add/remove share the same input shape; only the mutation differs.
                input_data = {"names": names, "bundled": bundled, "restart": restart}
                mutation_key = "add" if action == "add" else "remove"
                data = await make_graphql_request(MUTATIONS[mutation_key], {"input": input_data})
                result_key = "addPlugin" if action == "add" else "removePlugin"
                # NOTE(review): the mutation's scalar return is surfaced as
                # manual_restart_required — presumably truthy when the API could
                # not restart itself; confirm against the Unraid API schema.
                restart_required = data.get(result_key)
                return {
                    "success": True,
                    "action": action,
                    "names": names,
                    "manual_restart_required": restart_required,
                }
            # Defensive: ALL_ACTIONS was validated above, so this is unreachable
            # unless the action maps and the routing above fall out of sync.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Plugins tool registered successfully")

View File

@@ -1,198 +0,0 @@
"""RClone cloud storage remote management.
Provides the `unraid_rclone` tool with 4 actions for managing
cloud storage remotes (S3, Google Drive, Dropbox, FTP, etc.).
"""
import re
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.guards import gate_destructive_action
QUERIES: dict[str, str] = {
"list_remotes": """
query ListRCloneRemotes {
rclone { remotes { name type parameters config } }
}
""",
"config_form": """
query GetRCloneConfigForm($formOptions: RCloneConfigFormInput) {
rclone { configForm(formOptions: $formOptions) { id dataSchema uiSchema } }
}
""",
}
MUTATIONS: dict[str, str] = {
"create_remote": """
mutation CreateRCloneRemote($input: CreateRCloneRemoteInput!) {
rclone { createRCloneRemote(input: $input) { name type parameters } }
}
""",
"delete_remote": """
mutation DeleteRCloneRemote($input: DeleteRCloneRemoteInput!) {
rclone { deleteRCloneRemote(input: $input) }
}
""",
}
DESTRUCTIVE_ACTIONS = {"delete_remote"}
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
RCLONE_ACTIONS = Literal[
"list_remotes",
"config_form",
"create_remote",
"delete_remote",
]
if set(get_args(RCLONE_ACTIONS)) != ALL_ACTIONS:
_missing = ALL_ACTIONS - set(get_args(RCLONE_ACTIONS))
_extra = set(get_args(RCLONE_ACTIONS)) - ALL_ACTIONS
raise RuntimeError(
f"RCLONE_ACTIONS and ALL_ACTIONS are out of sync. "
f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
)
# Max config entries to prevent abuse
_MAX_CONFIG_KEYS = 50
# Pattern for suspicious key names (path traversal, shell metacharacters)
_DANGEROUS_KEY_PATTERN = re.compile(r"\.\.|[/\\;|`$(){}]")
# Max length for individual config values
_MAX_VALUE_LENGTH = 4096
def _validate_config_data(config_data: dict[str, Any]) -> dict[str, str]:
    """Validate and sanitize rclone config_data before passing to GraphQL.

    Ensures all keys and values are safe strings with no injection vectors.

    Raises:
        ToolError: If config_data contains invalid keys or values
    """
    if len(config_data) > _MAX_CONFIG_KEYS:
        raise ToolError(f"config_data has {len(config_data)} keys (max {_MAX_CONFIG_KEYS})")
    sanitized: dict[str, str] = {}
    for cfg_key, raw_value in config_data.items():
        # Keys must be non-empty strings free of traversal/shell metacharacters.
        if not isinstance(cfg_key, str) or not cfg_key.strip():
            raise ToolError(
                f"config_data keys must be non-empty strings, got: {type(cfg_key).__name__}"
            )
        if _DANGEROUS_KEY_PATTERN.search(cfg_key):
            raise ToolError(
                f"config_data key '{cfg_key}' contains disallowed characters "
                f"(path traversal or shell metacharacters)"
            )
        # Values are coerced to strings; only scalar types are accepted.
        if not isinstance(raw_value, (str, int, float, bool)):
            raise ToolError(
                f"config_data['{cfg_key}'] must be a string, number, or boolean, "
                f"got: {type(raw_value).__name__}"
            )
        text_value = str(raw_value)
        if len(text_value) > _MAX_VALUE_LENGTH:
            raise ToolError(
                f"config_data['{cfg_key}'] value exceeds max length "
                f"({len(text_value)} > {_MAX_VALUE_LENGTH})"
            )
        sanitized[cfg_key] = text_value
    return sanitized
def register_rclone_tool(mcp: FastMCP) -> None:
    """Register the unraid_rclone tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_rclone(
        action: RCLONE_ACTIONS,
        ctx: Context | None = None,
        confirm: bool = False,
        name: str | None = None,
        provider_type: str | None = None,
        config_data: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Manage RClone cloud storage remotes.

        Actions:
            list_remotes  - List all configured remotes
            config_form   - Get config form schema (optional provider_type for specific provider)
            create_remote - Create a new remote (requires name, provider_type, config_data)
            delete_remote - Delete a remote (requires name, confirm=True)
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        # 'delete_remote' is destructive: prompt the client or require confirm=True.
        await gate_destructive_action(
            ctx,
            action,
            DESTRUCTIVE_ACTIONS,
            confirm,
            f"Delete rclone remote **{name}**. This cannot be undone.",
        )
        with tool_error_handler("rclone", action, logger):
            logger.info(f"Executing unraid_rclone action={action}")
            if action == "list_remotes":
                data = await make_graphql_request(QUERIES["list_remotes"])
                # Defensive unwrap: return [] if the server sends a non-list.
                remotes = data.get("rclone", {}).get("remotes", [])
                return {"remotes": list(remotes) if isinstance(remotes, list) else []}
            if action == "config_form":
                variables: dict[str, Any] = {}
                if provider_type:
                    variables["formOptions"] = {"providerType": provider_type}
                # Pass None (not {}) when no options were given so the GraphQL
                # variable is omitted entirely.
                data = await make_graphql_request(QUERIES["config_form"], variables or None)
                form = data.get("rclone", {}).get("configForm", {})
                if not form:
                    raise ToolError("No RClone config form data received")
                return dict(form)
            if action == "create_remote":
                if name is None or provider_type is None or config_data is None:
                    raise ToolError("create_remote requires name, provider_type, and config_data")
                # Sanitize user-supplied config before it reaches the server.
                validated_config = _validate_config_data(config_data)
                data = await make_graphql_request(
                    MUTATIONS["create_remote"],
                    {
                        "input": {
                            "name": name,
                            "type": provider_type,
                            "parameters": validated_config,
                        }
                    },
                )
                remote = data.get("rclone", {}).get("createRCloneRemote")
                if not remote:
                    raise ToolError(
                        f"Failed to create remote '{name}': no confirmation from server"
                    )
                return {
                    "success": True,
                    "message": f"Remote '{name}' created successfully",
                    "remote": remote,
                }
            if action == "delete_remote":
                if not name:
                    raise ToolError("name is required for 'delete_remote' action")
                data = await make_graphql_request(
                    MUTATIONS["delete_remote"], {"input": {"name": name}}
                )
                success = data.get("rclone", {}).get("deleteRCloneRemote", False)
                if not success:
                    raise ToolError(f"Failed to delete remote '{name}'")
                return {
                    "success": True,
                    "message": f"Remote '{name}' deleted successfully",
                }
            # Unreachable unless ALL_ACTIONS and the routing above diverge.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("RClone tool registered successfully")

View File

@@ -1,100 +0,0 @@
"""System settings and UPS mutations.
Provides the `unraid_settings` tool with 2 actions for updating system
configuration and UPS monitoring.
"""
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.guards import gate_destructive_action
MUTATIONS: dict[str, str] = {
"update": """
mutation UpdateSettings($input: JSON!) {
updateSettings(input: $input) { restartRequired values warnings }
}
""",
"configure_ups": """
mutation ConfigureUps($config: UPSConfigInput!) {
configureUps(config: $config)
}
""",
}
DESTRUCTIVE_ACTIONS = {
"configure_ups",
}
ALL_ACTIONS = set(MUTATIONS)
SETTINGS_ACTIONS = Literal[
"configure_ups",
"update",
]
if set(get_args(SETTINGS_ACTIONS)) != ALL_ACTIONS:
_missing = ALL_ACTIONS - set(get_args(SETTINGS_ACTIONS))
_extra = set(get_args(SETTINGS_ACTIONS)) - ALL_ACTIONS
raise RuntimeError(
f"SETTINGS_ACTIONS and ALL_ACTIONS are out of sync. "
f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
)
def register_settings_tool(mcp: FastMCP) -> None:
    """Register the unraid_settings tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_settings(
        action: SETTINGS_ACTIONS,
        ctx: Context | None = None,
        confirm: bool = False,
        settings_input: dict[str, Any] | None = None,
        ups_config: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Update Unraid system settings and UPS configuration.

        Actions:
            update        - Update system settings (requires settings_input dict)
            configure_ups - Configure UPS monitoring (requires ups_config dict, confirm=True)
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        await gate_destructive_action(
            ctx,
            action,
            DESTRUCTIVE_ACTIONS,
            confirm,
            "Configure UPS monitoring. This will overwrite the current UPS daemon settings.",
        )
        with tool_error_handler("settings", action, logger):
            logger.info(f"Executing unraid_settings action={action}")
            if action == "update":
                if settings_input is None:
                    raise ToolError("settings_input is required for 'update' action")
                response = await make_graphql_request(
                    MUTATIONS["update"], {"input": settings_input}
                )
                return {
                    "success": True,
                    "action": "update",
                    "data": response.get("updateSettings"),
                }
            if action == "configure_ups":
                if ups_config is None:
                    raise ToolError("ups_config is required for 'configure_ups' action")
                response = await make_graphql_request(
                    MUTATIONS["configure_ups"], {"config": ups_config}
                )
                return {
                    "success": True,
                    "action": "configure_ups",
                    "result": response.get("configureUps"),
                }
            # Unreachable: action membership was validated above.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Settings tool registered successfully")

View File

@@ -1,215 +0,0 @@
"""Storage and disk management.
Provides the `unraid_storage` tool with 6 actions for shares, physical disks,
log files, and log content retrieval.
"""
import os
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..core.client import DISK_TIMEOUT, make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.guards import gate_destructive_action
from ..core.utils import format_bytes
_ALLOWED_LOG_PREFIXES = ("/var/log/", "/boot/logs/", "/mnt/")
_MAX_TAIL_LINES = 10_000
QUERIES: dict[str, str] = {
"shares": """
query GetSharesInfo {
shares {
id name free used size include exclude cache nameOrig
comment allocator splitLevel floor cow color luksStatus
}
}
""",
"disks": """
query ListPhysicalDisks {
disks { id device name }
}
""",
"disk_details": """
query GetDiskDetails($id: PrefixedID!) {
disk(id: $id) {
id device name serialNum size temperature
}
}
""",
"log_files": """
query ListLogFiles {
logFiles { name path size modifiedAt }
}
""",
"logs": """
query GetLogContent($path: String!, $lines: Int) {
logFile(path: $path, lines: $lines) {
path content totalLines startLine
}
}
""",
}
MUTATIONS: dict[str, str] = {
"flash_backup": """
mutation InitiateFlashBackup($input: InitiateFlashBackupInput!) {
initiateFlashBackup(input: $input) { status jobId }
}
""",
}
DESTRUCTIVE_ACTIONS = {"flash_backup"}
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
STORAGE_ACTIONS = Literal[
"shares",
"disks",
"disk_details",
"log_files",
"logs",
"flash_backup",
]
if set(get_args(STORAGE_ACTIONS)) != ALL_ACTIONS:
_missing = ALL_ACTIONS - set(get_args(STORAGE_ACTIONS))
_extra = set(get_args(STORAGE_ACTIONS)) - ALL_ACTIONS
raise RuntimeError(
f"STORAGE_ACTIONS and ALL_ACTIONS are out of sync. "
f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
)
def register_storage_tool(mcp: FastMCP) -> None:
    """Register the unraid_storage tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_storage(
        action: STORAGE_ACTIONS,
        ctx: Context | None = None,
        disk_id: str | None = None,
        log_path: str | None = None,
        tail_lines: int = 100,
        confirm: bool = False,
        remote_name: str | None = None,
        source_path: str | None = None,
        destination_path: str | None = None,
        backup_options: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Manage Unraid storage, disks, and logs.

        Actions:
            shares       - List all user shares with capacity info
            disks        - List all physical disks
            disk_details - Detailed SMART info for a disk (requires disk_id)
            log_files    - List available log files
            logs         - Retrieve log content (requires log_path, optional tail_lines)
            flash_backup - Initiate flash backup via rclone (requires remote_name, source_path, destination_path, confirm=True)
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        # Only flash_backup is destructive; reads pass through the gate untouched.
        await gate_destructive_action(
            ctx,
            action,
            DESTRUCTIVE_ACTIONS,
            confirm,
            f"Back up flash drive to **{remote_name}:{destination_path}**. "
            "Existing backups at this destination will be overwritten.",
        )
        if action == "disk_details" and not disk_id:
            raise ToolError("disk_id is required for 'disk_details' action")
        if action == "logs" and (tail_lines < 1 or tail_lines > _MAX_TAIL_LINES):
            raise ToolError(f"tail_lines must be between 1 and {_MAX_TAIL_LINES}, got {tail_lines}")
        if action == "logs":
            if not log_path:
                raise ToolError("log_path is required for 'logs' action")
            # Resolve path synchronously to prevent traversal attacks.
            # Using os.path.realpath instead of anyio.Path.resolve() because the
            # async variant blocks on NFS-mounted paths under /mnt/ (Perf-AI-1).
            normalized = os.path.realpath(log_path)  # noqa: ASYNC240
            if not any(normalized.startswith(p) for p in _ALLOWED_LOG_PREFIXES):
                raise ToolError(
                    f"log_path must start with one of: {', '.join(_ALLOWED_LOG_PREFIXES)}. "
                    f"Use log_files action to discover valid paths."
                )
            # From here on the resolved (symlink-free) path is used.
            log_path = normalized
        if action == "flash_backup":
            if not remote_name:
                raise ToolError("remote_name is required for 'flash_backup' action")
            if not source_path:
                raise ToolError("source_path is required for 'flash_backup' action")
            if not destination_path:
                raise ToolError("destination_path is required for 'flash_backup' action")
            input_data: dict[str, Any] = {
                "remoteName": remote_name,
                "sourcePath": source_path,
                "destinationPath": destination_path,
            }
            if backup_options is not None:
                input_data["options"] = backup_options
            with tool_error_handler("storage", action, logger):
                logger.info("Executing unraid_storage action=flash_backup")
                data = await make_graphql_request(MUTATIONS["flash_backup"], {"input": input_data})
                backup = data.get("initiateFlashBackup")
                if not backup:
                    raise ToolError("Failed to start flash backup: no confirmation from server")
                return {
                    "success": True,
                    "action": "flash_backup",
                    "data": backup,
                }
        # All remaining actions are read-only queries.
        query = QUERIES[action]
        variables: dict[str, Any] | None = None
        # Disk queries hit SMART/hardware and need a longer timeout.
        custom_timeout = DISK_TIMEOUT if action in ("disks", "disk_details") else None
        if action == "disk_details":
            variables = {"id": disk_id}
        elif action == "logs":
            variables = {"path": log_path, "lines": tail_lines}
        with tool_error_handler("storage", action, logger):
            logger.info(f"Executing unraid_storage action={action}")
            data = await make_graphql_request(query, variables, custom_timeout=custom_timeout)
            if action == "shares":
                return {"shares": data.get("shares", [])}
            if action == "disks":
                return {"disks": data.get("disks", [])}
            if action == "disk_details":
                raw = data.get("disk", {})
                if not raw:
                    raise ToolError(f"Disk '{disk_id}' not found")
                # Human-friendly summary alongside the raw GraphQL payload.
                summary = {
                    "disk_id": raw.get("id"),
                    "device": raw.get("device"),
                    "name": raw.get("name"),
                    "serial_number": raw.get("serialNum"),
                    "size_formatted": format_bytes(raw.get("size")),
                    "temperature": (
                        f"{raw['temperature']}\u00b0C"
                        if raw.get("temperature") is not None
                        else "N/A"
                    ),
                }
                return {"summary": summary, "details": raw}
            if action == "log_files":
                return {"log_files": data.get("logFiles", [])}
            if action == "logs":
                return dict(data.get("logFile") or {})
            # Unreachable unless ALL_ACTIONS and the routing above diverge.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("Storage tool registered successfully")

1891
unraid_mcp/tools/unraid.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,51 +0,0 @@
"""User account query.
Provides the `unraid_users` tool with 1 action for querying the current authenticated user.
Note: Unraid GraphQL API does not support user management operations (list, add, delete).
"""
from typing import Any, Literal
from fastmcp import FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
QUERIES: dict[str, str] = {
"me": """
query GetMe {
me { id name description roles }
}
""",
}
ALL_ACTIONS = set(QUERIES)
USER_ACTIONS = Literal["me"]
def register_users_tool(mcp: FastMCP) -> None:
    """Register the unraid_users tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_users(
        action: USER_ACTIONS = "me",
    ) -> dict[str, Any]:
        """Query current authenticated user.

        Actions:
            me - Get current authenticated user info (id, name, description, roles)

        Note: Unraid API does not support user management operations (list, add, delete).
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        with tool_error_handler("users", action, logger):
            logger.info("Executing unraid_users action=me")
            response = await make_graphql_request(QUERIES["me"])
            # Normalize a missing/None 'me' payload to an empty dict.
            current_user = response.get("me")
            return current_user if current_user else {}

    logger.info("Users tool registered successfully")

View File

@@ -1,165 +0,0 @@
"""Virtual machine management.
Provides the `unraid_vm` tool with 9 actions for VM lifecycle management
including start, stop, pause, resume, force stop, reboot, and reset.
"""
from typing import Any, Literal, get_args
from fastmcp import Context, FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError, tool_error_handler
from ..core.guards import gate_destructive_action
QUERIES: dict[str, str] = {
"list": """
query ListVMs {
vms { id domains { id name state uuid } }
}
""",
# NOTE: The Unraid GraphQL API does not expose a single-VM query.
# The details query is identical to list; client-side filtering is required.
"details": """
query ListVMs {
vms { id domains { id name state uuid } }
}
""",
}
MUTATIONS: dict[str, str] = {
"start": """
mutation StartVM($id: PrefixedID!) { vm { start(id: $id) } }
""",
"stop": """
mutation StopVM($id: PrefixedID!) { vm { stop(id: $id) } }
""",
"pause": """
mutation PauseVM($id: PrefixedID!) { vm { pause(id: $id) } }
""",
"resume": """
mutation ResumeVM($id: PrefixedID!) { vm { resume(id: $id) } }
""",
"force_stop": """
mutation ForceStopVM($id: PrefixedID!) { vm { forceStop(id: $id) } }
""",
"reboot": """
mutation RebootVM($id: PrefixedID!) { vm { reboot(id: $id) } }
""",
"reset": """
mutation ResetVM($id: PrefixedID!) { vm { reset(id: $id) } }
""",
}
# Map action names to GraphQL field names (only where they differ)
_MUTATION_FIELDS: dict[str, str] = {
"force_stop": "forceStop",
}
DESTRUCTIVE_ACTIONS = {"force_stop", "reset"}
VM_ACTIONS = Literal[
"list",
"details",
"start",
"stop",
"pause",
"resume",
"force_stop",
"reboot",
"reset",
]
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)
if set(get_args(VM_ACTIONS)) != ALL_ACTIONS:
_missing = ALL_ACTIONS - set(get_args(VM_ACTIONS))
_extra = set(get_args(VM_ACTIONS)) - ALL_ACTIONS
raise RuntimeError(
f"VM_ACTIONS and ALL_ACTIONS are out of sync. "
f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}"
)
def register_vm_tool(mcp: FastMCP) -> None:
    """Register the unraid_vm tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_vm(
        action: VM_ACTIONS,
        ctx: Context | None = None,
        vm_id: str | None = None,
        confirm: bool = False,
    ) -> dict[str, Any]:
        """Manage Unraid virtual machines.

        Actions:
            list       - List all VMs with state
            details    - Detailed info for a VM (requires vm_id: UUID, PrefixedID, or name)
            start      - Start a VM (requires vm_id)
            stop       - Gracefully stop a VM (requires vm_id)
            pause      - Pause a VM (requires vm_id)
            resume     - Resume a paused VM (requires vm_id)
            force_stop - Force stop a VM (requires vm_id, confirm=True)
            reboot     - Reboot a VM (requires vm_id)
            reset      - Reset a VM (requires vm_id, confirm=True)
        """
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")
        # Every action except 'list' targets a specific VM.
        if action != "list" and not vm_id:
            raise ToolError(f"vm_id is required for '{action}' action")
        # force_stop/reset are destructive; the gate picks the matching message.
        await gate_destructive_action(
            ctx,
            action,
            DESTRUCTIVE_ACTIONS,
            confirm,
            {
                "force_stop": f"Force stop VM **{vm_id}**. Unsaved data may be lost.",
                "reset": f"Reset VM **{vm_id}**. This is a hard reset — unsaved data may be lost.",
            },
        )
        with tool_error_handler("vm", action, logger):
            logger.info(f"Executing unraid_vm action={action}")
            if action == "list":
                data = await make_graphql_request(QUERIES["list"])
                if data.get("vms"):
                    # Server may answer with 'domains' (plural) or 'domain';
                    # a lone dict means a single VM — normalize to a list.
                    vms = data["vms"].get("domains") or data["vms"].get("domain") or []
                    if isinstance(vms, dict):
                        vms = [vms]
                    return {"vms": vms}
                return {"vms": []}
            if action == "details":
                data = await make_graphql_request(QUERIES["details"])
                if not data.get("vms"):
                    raise ToolError("No VM data returned from server")
                vms = data["vms"].get("domains") or data["vms"].get("domain") or []
                if isinstance(vms, dict):
                    vms = [vms]
                # No single-VM query exists server-side: match vm_id against
                # UUID, PrefixedID, or name client-side.
                for vm in vms:
                    if vm.get("uuid") == vm_id or vm.get("id") == vm_id or vm.get("name") == vm_id:
                        return dict(vm)
                available = [f"{v.get('name')} (UUID: {v.get('uuid')})" for v in vms]
                raise ToolError(f"VM '{vm_id}' not found. Available: {', '.join(available)}")
            # Mutations
            if action in MUTATIONS:
                data = await make_graphql_request(MUTATIONS[action], {"id": vm_id})
                # GraphQL field name differs from the action only for force_stop.
                field = _MUTATION_FIELDS.get(action, action)
                if data.get("vm") and field in data["vm"]:
                    return {
                        "success": data["vm"][field],
                        "action": action,
                        "vm_id": vm_id,
                    }
                raise ToolError(f"Failed to {action} VM or unexpected response")
            # Unreachable unless ALL_ACTIONS and the routing above diverge.
            raise ToolError(f"Unhandled action '{action}' — this is a bug")

    logger.info("VM tool registered successfully")