refactor: remove Docker and HTTP transport support, fix hypothesis cache directory

This commit is contained in:
Jacob Magar
2026-03-24 19:22:27 -04:00
parent e68d4a80e4
commit e548f6e6c9
39 changed files with 369 additions and 1757 deletions

View File

@@ -41,7 +41,7 @@ Query and monitor Unraid servers via GraphQL API - array status, disk health, co
- Notification management - Notification management
- Plugin, rclone, API key, and OIDC management - Plugin, rclone, API key, and OIDC management
**Version:** 1.0.0 **Version:** 1.1.2
**Category:** Infrastructure **Category:** Infrastructure
**Tags:** unraid, monitoring, homelab, graphql, docker, virtualization **Tags:** unraid, monitoring, homelab, graphql, docker, virtualization

View File

@@ -1,12 +1,12 @@
{ {
"name": "jmagar-unraid-mcp", "name": "unraid-mcp",
"owner": { "owner": {
"name": "jmagar", "name": "jmagar",
"email": "jmagar@users.noreply.github.com" "email": "jmagar@users.noreply.github.com"
}, },
"metadata": { "metadata": {
"description": "Comprehensive Unraid server management and monitoring via a single consolidated MCP tool (~108 actions across 15 domains)", "description": "Comprehensive Unraid server management and monitoring via a single consolidated MCP tool (~108 actions across 15 domains)",
"version": "1.0.0", "version": "1.1.2",
"homepage": "https://github.com/jmagar/unraid-mcp", "homepage": "https://github.com/jmagar/unraid-mcp",
"repository": "https://github.com/jmagar/unraid-mcp" "repository": "https://github.com/jmagar/unraid-mcp"
}, },
@@ -14,8 +14,8 @@
{ {
"name": "unraid", "name": "unraid",
"source": "./", "source": "./",
"description": "Query and monitor Unraid servers via GraphQL API — single `unraid` tool with action+subaction routing for array, disk, docker, VM, notifications, live metrics, and more", "description": "Query, monitor, and manage Unraid servers via GraphQL API — single `unraid` tool with action+subaction routing for array, disk, docker, VM, notifications, live metrics, and more",
"version": "1.0.0", "version": "1.1.2",
"tags": ["unraid", "monitoring", "homelab", "graphql", "docker", "virtualization"], "tags": ["unraid", "monitoring", "homelab", "graphql", "docker", "virtualization"],
"category": "infrastructure" "category": "infrastructure"
} }

View File

@@ -1,6 +1,6 @@
{ {
"name": "unraid", "name": "unraid",
"description": "Query and monitor Unraid servers via GraphQL API - array status, disk health, containers, VMs, system monitoring", "description": "Query, monitor, and manage Unraid servers via GraphQL API - array status, disk health, containers, VMs, system monitoring",
"version": "1.1.2", "version": "1.1.2",
"author": { "author": {
"name": "jmagar", "name": "jmagar",

View File

@@ -1,31 +0,0 @@
Dockerfile
.dockerignore
.git
.gitignore
__pycache__
*.pyc
*.pyo
*.pyd
.env
.env.local
.env.*
*.log
logs/
*.db
*.sqlite3
instance/
.pytest_cache/
.ty_cache/
.venv/
venv/
env/
.vscode/
cline_docs/
tests/
docs/
scripts/
commands/
.full-review/
.claude-plugin/
*.md
!README.md

View File

@@ -8,7 +8,10 @@ UNRAID_API_KEY=your_unraid_api_key
# MCP Server Settings # MCP Server Settings
# ------------------- # -------------------
UNRAID_MCP_TRANSPORT=streamable-http # Options: streamable-http (recommended), sse (deprecated), stdio # Default transport is stdio (for Claude Desktop / local use).
# Docker Compose overrides this to streamable-http automatically.
# Options: stdio (default), streamable-http, sse (deprecated)
UNRAID_MCP_TRANSPORT=stdio
UNRAID_MCP_HOST=0.0.0.0 UNRAID_MCP_HOST=0.0.0.0
UNRAID_MCP_PORT=6970 UNRAID_MCP_PORT=6970
@@ -41,41 +44,15 @@ UNRAID_MAX_RECONNECT_ATTEMPTS=10
# Override the credentials directory (default: ~/.unraid-mcp/) # Override the credentials directory (default: ~/.unraid-mcp/)
# UNRAID_CREDENTIALS_DIR=/custom/path/to/credentials # UNRAID_CREDENTIALS_DIR=/custom/path/to/credentials
# Google OAuth Protection (Optional) # Authentication
# ----------------------------------- # --------------
# Protects the MCP HTTP server — clients must authenticate with Google before calling tools. # This server has NO built-in authentication.
# Requires streamable-http or sse transport (not stdio). # When running as HTTP (streamable-http transport), protect the endpoint with
# an external OAuth gateway or identity-aware proxy:
# #
# Setup: # Reverse proxy with auth: nginx + OAuth2-proxy, Caddy + forward auth
# 1. Google Cloud Console → APIs & Services → Credentials # Identity-aware proxy: Authelia, Authentik, Pomerium
# 2. Create OAuth 2.0 Client ID (Web application) # Network isolation: bind to 127.0.0.1, use VPN/Tailscale for access
# 3. Authorized redirect URIs: <UNRAID_MCP_BASE_URL>/auth/callback # Firewall rules: restrict source IPs at the network layer
# 4. Copy Client ID and Client Secret below
# #
# UNRAID_MCP_BASE_URL: Public URL clients use to reach THIS server (for redirect URIs). # stdio transport (default) is inherently local — no network exposure.
# Examples:
# http://10.1.0.2:6970 (LAN)
# http://100.x.x.x:6970 (Tailscale)
# https://mcp.yourdomain.com (reverse proxy)
#
# UNRAID_MCP_JWT_SIGNING_KEY: Stable secret for signing FastMCP JWT tokens.
# Generate once: python3 -c "import secrets; print(secrets.token_hex(32))"
# NEVER change after first use — all client sessions will be invalidated.
#
# Leave GOOGLE_CLIENT_ID empty to disable OAuth (server runs unprotected).
# GOOGLE_CLIENT_ID=
# GOOGLE_CLIENT_SECRET=
# UNRAID_MCP_BASE_URL=http://10.1.0.2:6970
# UNRAID_MCP_JWT_SIGNING_KEY=<generate with command above>
# API Key Authentication (Optional)
# -----------------------------------
# Alternative to Google OAuth — clients present this key as a bearer token:
# Authorization: Bearer <UNRAID_MCP_API_KEY>
#
# Can be the same value as UNRAID_API_KEY (reuse your Unraid key), or a
# separate dedicated secret. Set both GOOGLE_CLIENT_ID and UNRAID_MCP_API_KEY
# to accept either auth method (MultiAuth).
#
# Leave empty to disable API key auth.
# UNRAID_MCP_API_KEY=

View File

@@ -45,9 +45,7 @@ jobs:
version: "0.9.25" version: "0.9.25"
- name: Install dependencies - name: Install dependencies
run: uv sync --group dev run: uv sync --group dev
- name: Run tests (excluding integration/slow) - name: Run tests with coverage (excluding integration/slow)
run: uv run pytest -m "not slow and not integration" --tb=short -q
- name: Check coverage
run: uv run pytest -m "not slow and not integration" --cov=unraid_mcp --cov-report=term-missing --tb=short -q run: uv run pytest -m "not slow and not integration" --cov=unraid_mcp --cov-report=term-missing --tb=short -q
version-sync: version-sync:

View File

@@ -28,6 +28,12 @@ All notable changes to this project are documented here.
- Added `# noqa: ASYNC109` to `timeout` parameters in `_handle_live` and `unraid()` (valid suppressions) - Added `# noqa: ASYNC109` to `timeout` parameters in `_handle_live` and `unraid()` (valid suppressions)
- Fixed `start_array*` → `start_array` in tool docstring table (`start_array` is not in `_ARRAY_DESTRUCTIVE`)
### Refactored
- **Path validation**: Extracted `_validate_path()` in `unraid.py` — consolidates traversal check, `normpath`, and prefix validation used by both `disk/logs` and `live/log_tail` into one place; eliminates duplication
- **WebSocket auth payload**: Extracted `build_connection_init()` in `subscriptions/utils.py` — removes 4 duplicate `connection_init` blocks from `snapshot.py` (×2), `manager.py`, and `diagnostics.py`; also fixes a bug in `diagnostics.py` where `x-api-key: None` was sent when no API key was configured
- Removed `_LIVE_ALLOWED_LOG_PREFIXES` alias — direct reference to `_ALLOWED_LOG_PREFIXES`
- Moved `import hmac` to module level in `server.py` (was inside `verify_token` hot path)
--- ---
## [1.1.1] - 2026-03-16 ## [1.1.1] - 2026-03-16

View File

@@ -38,21 +38,6 @@ uv run ty check unraid_mcp/
uv run pytest uv run pytest
``` ```
### Docker Development
```bash
# Build the Docker image
docker build -t unraid-mcp-server .
# Run with Docker Compose
docker compose up -d
# View logs
docker compose logs -f unraid-mcp
# Stop service
docker compose down
```
### Environment Setup ### Environment Setup
Copy `.env.example` to `.env` and configure: Copy `.env.example` to `.env` and configure:
@@ -61,9 +46,6 @@ Copy `.env.example` to `.env` and configure:
- `UNRAID_API_KEY`: Unraid API key - `UNRAID_API_KEY`: Unraid API key
**Server:** **Server:**
- `UNRAID_MCP_TRANSPORT`: Transport type (default: streamable-http)
- `UNRAID_MCP_PORT`: Server port (default: 6970)
- `UNRAID_MCP_HOST`: Server host (default: 0.0.0.0)
- `UNRAID_MCP_LOG_LEVEL`: Log verbosity (default: INFO) - `UNRAID_MCP_LOG_LEVEL`: Log verbosity (default: INFO)
- `UNRAID_MCP_LOG_FILE`: Log filename in logs/ (default: unraid-mcp.log) - `UNRAID_MCP_LOG_FILE`: Log filename in logs/ (default: unraid-mcp.log)
@@ -77,36 +59,6 @@ Copy `.env.example` to `.env` and configure:
**Credentials override:** **Credentials override:**
- `UNRAID_CREDENTIALS_DIR`: Override the `~/.unraid-mcp/` credentials directory path - `UNRAID_CREDENTIALS_DIR`: Override the `~/.unraid-mcp/` credentials directory path
### Authentication (Optional — protects the HTTP server)
Two independent methods. Use either or both — when both are set, `MultiAuth` accepts either.
**Google OAuth** — requires all three vars:
| Env Var | Purpose |
|---------|---------|
| `GOOGLE_CLIENT_ID` | Google OAuth 2.0 Client ID |
| `GOOGLE_CLIENT_SECRET` | Google OAuth 2.0 Client Secret |
| `UNRAID_MCP_BASE_URL` | Public URL of this server (e.g. `http://10.1.0.2:6970`) |
| `UNRAID_MCP_JWT_SIGNING_KEY` | Stable 32+ char secret — prevents token invalidation on restart |
Google Cloud Console setup: APIs & Services → Credentials → OAuth 2.0 Client ID (Web application) → Authorized redirect URIs: `<UNRAID_MCP_BASE_URL>/auth/callback`
**API Key** — clients present as `Authorization: Bearer <key>`:
| Env Var | Purpose |
|---------|---------|
| `UNRAID_MCP_API_KEY` | Static bearer token (can be same value as `UNRAID_API_KEY`) |
**Generate a stable JWT signing key:**
```bash
python3 -c "import secrets; print(secrets.token_hex(32))"
```
**Omit all auth vars to run without auth** (default — open server).
**Full guide:** [`docs/AUTHENTICATION.md`](docs/AUTHENTICATION.md)
## Architecture ## Architecture
### Core Components ### Core Components
@@ -114,10 +66,13 @@ python3 -c "import secrets; print(secrets.token_hex(32))"
- **Entry Point**: `unraid_mcp/main.py` - Application entry point and startup logic - **Entry Point**: `unraid_mcp/main.py` - Application entry point and startup logic
- **Configuration**: `unraid_mcp/config/` - Settings management and logging configuration - **Configuration**: `unraid_mcp/config/` - Settings management and logging configuration
- **Core Infrastructure**: `unraid_mcp/core/` - GraphQL client, exceptions, and shared types - **Core Infrastructure**: `unraid_mcp/core/` - GraphQL client, exceptions, and shared types
- `guards.py` — destructive action gating via MCP elicitation
- `utils.py` — shared helpers (`safe_get`, `safe_display_url`, path validation)
- `setup.py` — elicitation-based credential setup flow
- **Subscriptions**: `unraid_mcp/subscriptions/` - Real-time WebSocket subscriptions and diagnostics - **Subscriptions**: `unraid_mcp/subscriptions/` - Real-time WebSocket subscriptions and diagnostics
- **Tools**: `unraid_mcp/tools/` - Domain-specific tool implementations - **Tools**: `unraid_mcp/tools/` - Domain-specific tool implementations
- **GraphQL Client**: Uses httpx for async HTTP requests to Unraid API - **GraphQL Client**: Uses httpx for async HTTP requests to Unraid API
- **Transport Layer**: Supports streamable-http (recommended), SSE (deprecated), and stdio - **Version Helper**: `unraid_mcp/version.py` - Reads version from package metadata via importlib
### Key Design Patterns ### Key Design Patterns
- **Consolidated Action Pattern**: Each tool uses `action: Literal[...]` parameter to expose multiple operations via a single MCP tool, reducing context window usage - **Consolidated Action Pattern**: Each tool uses `action: Literal[...]` parameter to expose multiple operations via a single MCP tool, reducing context window usage
@@ -165,26 +120,20 @@ The server registers **3 MCP tools**:
### Destructive Actions (require `confirm=True`) ### Destructive Actions (require `confirm=True`)
- **array**: stop_array, remove_disk, clear_disk_stats - **array**: stop_array, remove_disk, clear_disk_stats
- **vm**: force_stop, reset - **vm**: force_stop, reset
- **notifications**: delete, delete_archived - **notification**: delete, delete_archived
- **rclone**: delete_remote - **rclone**: delete_remote
- **keys**: delete - **key**: delete
- **disk**: flash_backup - **disk**: flash_backup
- **settings**: configure_ups - **setting**: configure_ups
- **plugins**: remove - **plugin**: remove
### Environment Variable Hierarchy ### Environment Variable Hierarchy
The server loads environment variables from multiple locations in order: The server loads environment variables from multiple locations in order:
1. `~/.unraid-mcp/.env` (primary — canonical credentials dir, all runtimes) 1. `~/.unraid-mcp/.env` (primary — canonical credentials dir, all runtimes)
2. `~/.unraid-mcp/.env.local` (local overrides, only used if primary is absent) 2. `~/.unraid-mcp/.env.local` (local overrides, only used if primary is absent)
3. `/app/.env.local` (Docker container mount) 3. `../.env.local` (project root local overrides)
4. `../.env.local` (project root local overrides) 4. `../.env` (project root fallback)
5. `../.env` (project root fallback) 5. `unraid_mcp/.env` (last resort)
6. `unraid_mcp/.env` (last resort)
### Transport Configuration
- **streamable-http** (recommended): HTTP-based transport on `/mcp` endpoint
- **sse** (deprecated): Server-Sent Events transport
- **stdio**: Standard input/output for direct integration
### Error Handling Strategy ### Error Handling Strategy
- GraphQL errors are converted to ToolError with descriptive messages - GraphQL errors are converted to ToolError with descriptive messages
@@ -192,6 +141,14 @@ The server loads environment variables from multiple locations in order:
- Network errors are caught and wrapped with connection context - Network errors are caught and wrapped with connection context
- All errors are logged with full context for debugging - All errors are logged with full context for debugging
### Middleware Chain
`server.py` wraps all tools in a 5-layer stack (order matters — outermost first):
1. **LoggingMiddleware** — logs every `tools/call` and `resources/read` with duration
2. **ErrorHandlingMiddleware** — converts unhandled exceptions to proper MCP errors
3. **SlidingWindowRateLimitingMiddleware** — 540 req/min sliding window
4. **ResponseLimitingMiddleware** — truncates responses > 512 KB with a clear suffix
5. **ResponseCachingMiddleware** — caching disabled entirely for `unraid` tool (mutations and reads share one tool name, so no per-subaction exclusion is possible)
### Performance Considerations ### Performance Considerations
- Increased timeouts for disk operations (90s read timeout) - Increased timeouts for disk operations (90s read timeout)
- Selective queries to avoid GraphQL type overflow issues - Selective queries to avoid GraphQL type overflow issues
@@ -216,7 +173,9 @@ tests/
├── http_layer/ # httpx-level request/response tests (respx) ├── http_layer/ # httpx-level request/response tests (respx)
├── integration/ # WebSocket subscription lifecycle tests (slow) ├── integration/ # WebSocket subscription lifecycle tests (slow)
├── safety/ # Destructive action guard tests ├── safety/ # Destructive action guard tests
├── schema/ # GraphQL query validation (99 tests, all passing) ├── schema/ # GraphQL query validation (119 tests)
├── contract/ # Response shape contract tests
└── property/ # Input validation property-based tests
``` ```
### Running Targeted Tests ### Running Targeted Tests
@@ -244,6 +203,8 @@ See `tests/mcporter/README.md` for transport differences and `docs/DESTRUCTIVE_A
### API Reference Docs ### API Reference Docs
- `docs/UNRAID_API_COMPLETE_REFERENCE.md` — Full GraphQL schema reference - `docs/UNRAID_API_COMPLETE_REFERENCE.md` — Full GraphQL schema reference
- `docs/UNRAID_API_OPERATIONS.md` — All supported operations with examples - `docs/UNRAID_API_OPERATIONS.md` — All supported operations with examples
- `docs/MARKETPLACE.md` — Plugin marketplace listing and publishing guide
- `docs/PUBLISHING.md` — Step-by-step instructions for publishing to Claude plugin registry
Use these when adding new queries/mutations. Use these when adding new queries/mutations.
@@ -253,12 +214,11 @@ When bumping the version, **always update both files** — they must stay in syn
- `.claude-plugin/plugin.json``"version": "X.Y.Z"` - `.claude-plugin/plugin.json``"version": "X.Y.Z"`
### Credential Storage (`~/.unraid-mcp/.env`) ### Credential Storage (`~/.unraid-mcp/.env`)
All runtimes (plugin, direct, Docker) load credentials from `~/.unraid-mcp/.env`. All runtimes (plugin, direct `uv run`) load credentials from `~/.unraid-mcp/.env`.
- **Plugin/direct:** `unraid action=health subaction=setup` writes this file automatically via elicitation, - **Plugin/direct:** `unraid action=health subaction=setup` writes this file automatically via elicitation,
**Safe to re-run**: always prompts for confirmation before overwriting existing credentials, **Safe to re-run**: always prompts for confirmation before overwriting existing credentials,
whether the connection is working or not (failed probe may be a transient outage, not bad creds). whether the connection is working or not (failed probe may be a transient outage, not bad creds).
or manual: `mkdir -p ~/.unraid-mcp && cp .env.example ~/.unraid-mcp/.env` then edit. or manual: `mkdir -p ~/.unraid-mcp && cp .env.example ~/.unraid-mcp/.env` then edit.
- **Docker:** `docker-compose.yml` loads it via `env_file` before container start.
- **No symlinks needed.** Version bumps do not affect this path. - **No symlinks needed.** Version bumps do not affect this path.
- **Permissions:** dir=700, file=600 (set automatically by elicitation; set manually if - **Permissions:** dir=700, file=600 (set automatically by elicitation; set manually if
using `cp`: `chmod 700 ~/.unraid-mcp && chmod 600 ~/.unraid-mcp/.env`). using `cp`: `chmod 700 ~/.unraid-mcp && chmod 600 ~/.unraid-mcp/.env`).

View File

@@ -1,48 +0,0 @@
# Use an official Python runtime as a parent image
FROM python:3.12-slim
# Set the working directory in the container
WORKDIR /app
# Install uv (pinned tag to avoid mutable latest)
COPY --from=ghcr.io/astral-sh/uv:0.9.25 /uv /uvx /usr/local/bin/
# Create non-root user with home directory and give ownership of /app
RUN groupadd --gid 1000 appuser && \
useradd --uid 1000 --gid 1000 --create-home --shell /bin/false appuser && \
chown appuser:appuser /app
# Copy dependency files (owned by appuser via --chown)
COPY --chown=appuser:appuser pyproject.toml .
COPY --chown=appuser:appuser uv.lock .
COPY --chown=appuser:appuser README.md .
COPY --chown=appuser:appuser LICENSE .
# Copy the source code
COPY --chown=appuser:appuser unraid_mcp/ ./unraid_mcp/
# Switch to non-root user before installing dependencies
USER appuser
# Install dependencies and the package
RUN uv sync --frozen
# Make port UNRAID_MCP_PORT available to the world outside this container
# Defaulting to 6970, but can be overridden by environment variable
EXPOSE 6970
# Define environment variables (defaults, can be overridden at runtime)
ENV UNRAID_MCP_PORT=6970
ENV UNRAID_MCP_HOST="0.0.0.0"
ENV UNRAID_MCP_TRANSPORT="streamable-http"
ENV UNRAID_API_URL=""
ENV UNRAID_API_KEY=""
ENV UNRAID_VERIFY_SSL="true"
ENV UNRAID_MCP_LOG_LEVEL="INFO"
# Health check
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD ["python", "-c", "import os, urllib.request; port = os.getenv('UNRAID_MCP_PORT', '6970'); urllib.request.urlopen(f'http://localhost:{port}/mcp')"]
# Run unraid-mcp-server when the container launches
CMD ["uv", "run", "unraid-mcp-server"]

140
README.md
View File

@@ -13,8 +13,7 @@
- **High Performance**: Async/concurrent operations with optimized timeouts - **High Performance**: Async/concurrent operations with optimized timeouts
- 🔄 **Real-time Data**: WebSocket subscriptions for live metrics, logs, array state, and more - 🔄 **Real-time Data**: WebSocket subscriptions for live metrics, logs, array state, and more
- 📊 **Health Monitoring**: Comprehensive system diagnostics and status - 📊 **Health Monitoring**: Comprehensive system diagnostics and status
- 🐳 **Docker Ready**: Full containerization support with Docker Compose - 🔒 **Secure**: Network-layer isolation
- 🔒 **Secure**: Optional Google OAuth 2.0 authentication + SSL/TLS + API key management
- 📝 **Rich Logging**: Structured logging with rotation and multiple levels - 📝 **Rich Logging**: Structured logging with rotation and multiple levels
--- ---
@@ -25,7 +24,7 @@
- [Quick Start](#-quick-start) - [Quick Start](#-quick-start)
- [Installation](#-installation) - [Installation](#-installation)
- [Configuration](#-configuration) - [Configuration](#-configuration)
- [Google OAuth](#-google-oauth-optional) - [Authentication](#-authentication)
- [Available Tools & Resources](#-available-tools--resources) - [Available Tools & Resources](#-available-tools--resources)
- [Development](#-development) - [Development](#-development)
- [Architecture](#-architecture) - [Architecture](#-architecture)
@@ -56,7 +55,7 @@ This provides instant access to Unraid monitoring and management through Claude
### ⚙️ Credential Setup ### ⚙️ Credential Setup
Credentials are stored in `~/.unraid-mcp/.env` — one location that works for the Credentials are stored in `~/.unraid-mcp/.env` — one location that works for the
Claude Code plugin, direct `uv run` invocations, and Docker. Claude Code plugin and direct `uv run` invocations.
**Option 1 — Interactive (Claude Code plugin, elicitation-supported clients):** **Option 1 — Interactive (Claude Code plugin, elicitation-supported clients):**
``` ```
@@ -74,9 +73,6 @@ cp .env.example ~/.unraid-mcp/.env && chmod 600 ~/.unraid-mcp/.env
# UNRAID_API_KEY=your-key-from-unraid-settings # UNRAID_API_KEY=your-key-from-unraid-settings
``` ```
**Docker:** `~/.unraid-mcp/.env` is loaded via `env_file` in `docker-compose.yml`
same file, no duplication needed.
> **Finding your API key:** Unraid → Settings → Management Access → API Keys > **Finding your API key:** Unraid → Settings → Management Access → API Keys
--- ---
@@ -84,8 +80,7 @@ same file, no duplication needed.
## 🚀 Quick Start ## 🚀 Quick Start
### Prerequisites ### Prerequisites
- Docker and Docker Compose (recommended) - Python 3.12+ with [uv](https://github.com/astral-sh/uv) for development
- OR Python 3.12+ with [uv](https://github.com/astral-sh/uv) for development
- Unraid server with GraphQL API enabled - Unraid server with GraphQL API enabled
### 1. Clone Repository ### 1. Clone Repository
@@ -96,7 +91,7 @@ cd unraid-mcp
### 2. Configure Environment ### 2. Configure Environment
```bash ```bash
# For Docker/production use — canonical credential location (all runtimes) # Canonical credential location (all runtimes)
mkdir -p ~/.unraid-mcp && chmod 700 ~/.unraid-mcp mkdir -p ~/.unraid-mcp && chmod 700 ~/.unraid-mcp
cp .env.example ~/.unraid-mcp/.env && chmod 600 ~/.unraid-mcp/.env cp .env.example ~/.unraid-mcp/.env && chmod 600 ~/.unraid-mcp/.env
# Edit ~/.unraid-mcp/.env with your values # Edit ~/.unraid-mcp/.env with your values
@@ -105,16 +100,7 @@ cp .env.example ~/.unraid-mcp/.env && chmod 600 ~/.unraid-mcp/.env
cp .env.example .env cp .env.example .env
``` ```
### 3. Deploy with Docker (Recommended) ### 3. Run for Development
```bash
# Start with Docker Compose
docker compose up -d
# View logs
docker compose logs -f unraid-mcp
```
### OR 3. Run for Development
```bash ```bash
# Install dependencies # Install dependencies
uv sync uv sync
@@ -148,38 +134,6 @@ unraid-mcp/ # ${CLAUDE_PLUGIN_ROOT}
## 📦 Installation ## 📦 Installation
### 🐳 Docker Deployment (Recommended)
The easiest way to run the Unraid MCP Server is with Docker:
```bash
# Clone repository
git clone https://github.com/jmagar/unraid-mcp
cd unraid-mcp
# Set required environment variables
export UNRAID_API_URL="http://your-unraid-server/graphql"
export UNRAID_API_KEY="your_api_key_here"
# Deploy with Docker Compose
docker compose up -d
# View logs
docker compose logs -f unraid-mcp
```
#### Manual Docker Build
```bash
# Build and run manually
docker build -t unraid-mcp-server .
docker run -d --name unraid-mcp \
--restart unless-stopped \
-p 6970:6970 \
-e UNRAID_API_URL="http://your-unraid-server/graphql" \
-e UNRAID_API_KEY="your_api_key_here" \
unraid-mcp-server
```
### 🔧 Development Installation ### 🔧 Development Installation
For development and testing: For development and testing:
@@ -209,7 +163,7 @@ uv run unraid-mcp-server
### Environment Variables ### Environment Variables
Create `.env` file in the project root: Credentials and settings go in `~/.unraid-mcp/.env` (the canonical location loaded by all runtimes — plugin and direct `uv run`). See the [Credential Setup](#%EF%B8%8F-credential-setup) section above for how to create it.
```bash ```bash
# Core API Configuration (Required) # Core API Configuration (Required)
@@ -217,7 +171,7 @@ UNRAID_API_URL=https://your-unraid-server-url/graphql
UNRAID_API_KEY=your_unraid_api_key UNRAID_API_KEY=your_unraid_api_key
# MCP Server Settings # MCP Server Settings
UNRAID_MCP_TRANSPORT=streamable-http # streamable-http (recommended), sse (deprecated), stdio UNRAID_MCP_TRANSPORT=stdio # stdio (default)
UNRAID_MCP_HOST=0.0.0.0 UNRAID_MCP_HOST=0.0.0.0
UNRAID_MCP_PORT=6970 UNRAID_MCP_PORT=6970
@@ -232,58 +186,15 @@ UNRAID_VERIFY_SSL=true # true, false, or path to CA bundle
UNRAID_AUTO_START_SUBSCRIPTIONS=true # Auto-start WebSocket subscriptions on startup (default: true) UNRAID_AUTO_START_SUBSCRIPTIONS=true # Auto-start WebSocket subscriptions on startup (default: true)
UNRAID_MAX_RECONNECT_ATTEMPTS=10 # Max WebSocket reconnection attempts (default: 10) UNRAID_MAX_RECONNECT_ATTEMPTS=10 # Max WebSocket reconnection attempts (default: 10)
# Optional: Log Stream Configuration # Optional: Auto-start log file subscription path
# UNRAID_AUTOSTART_LOG_PATH=/var/log/syslog # Override log path for unraid://logs/stream (auto-detects /var/log/syslog if unset) # Defaults to /var/log/syslog if it exists and this is unset
# UNRAID_AUTOSTART_LOG_PATH=/var/log/syslog
# Optional: Credentials directory override (default: ~/.unraid-mcp/)
# Useful for containers or non-standard home directory layouts
# UNRAID_CREDENTIALS_DIR=/custom/path/to/credentials
``` ```
### Transport Options
| Transport | Description | Use Case |
|-----------|-------------|----------|
| `streamable-http` | HTTP-based (recommended) | Most compatible, best performance |
| `sse` | Server-Sent Events (deprecated) | Legacy support only |
| `stdio` | Standard I/O | Direct integration scenarios |
---
## 🔐 Authentication (Optional)
Two independent auth methods — use either or both.
### Google OAuth
Protect the HTTP server with Google OAuth 2.0 — clients must complete a Google login before any tool call is executed.
```bash
# Add to ~/.unraid-mcp/.env
GOOGLE_CLIENT_ID=your-client-id.apps.googleusercontent.com
GOOGLE_CLIENT_SECRET=GOCSPX-your-secret
UNRAID_MCP_BASE_URL=http://10.1.0.2:6970 # public URL of this server
UNRAID_MCP_JWT_SIGNING_KEY=<64-char-hex> # prevents token invalidation on restart
```
**Quick setup:**
1. [Google Cloud Console](https://console.cloud.google.com/) → Credentials → OAuth 2.0 Client ID (Web application)
2. Authorized redirect URI: `<UNRAID_MCP_BASE_URL>/auth/callback`
3. Copy Client ID + Secret into `~/.unraid-mcp/.env`
4. Generate a signing key: `python3 -c "import secrets; print(secrets.token_hex(32))"`
5. Restart the server
### API Key (Bearer Token)
Simpler option for headless/machine access — no browser flow required:
```bash
# Add to ~/.unraid-mcp/.env
UNRAID_MCP_API_KEY=your-secret-token # can be same value as UNRAID_API_KEY
```
Clients present it as `Authorization: Bearer <UNRAID_MCP_API_KEY>`. Set both `GOOGLE_CLIENT_ID` and `UNRAID_MCP_API_KEY` to accept either method simultaneously.
Omit both to run without authentication (default — open server).
**Full guide:** [`docs/AUTHENTICATION.md`](docs/AUTHENTICATION.md)
--- ---
## 🛠️ Available Tools & Resources ## 🛠️ Available Tools & Resources
@@ -388,8 +299,6 @@ unraid-mcp/
├── skills/unraid/ # Claude skill assets ├── skills/unraid/ # Claude skill assets
├── .claude-plugin/ # Plugin manifest & marketplace config ├── .claude-plugin/ # Plugin manifest & marketplace config
├── .env.example # Environment template ├── .env.example # Environment template
├── Dockerfile # Container image definition
├── docker-compose.yml # Docker Compose deployment
├── pyproject.toml # Project config & dependencies ├── pyproject.toml # Project config & dependencies
└── logs/ # Log files (auto-created, gitignored) └── logs/ # Log files (auto-created, gitignored)
``` ```
@@ -409,17 +318,14 @@ uv run pytest
### Integration Smoke-Tests (mcporter) ### Integration Smoke-Tests (mcporter)
Live integration tests that exercise all non-destructive actions via [mcporter](https://github.com/mcporter/mcporter). Two scripts cover two transport modes: Live integration tests that exercise all non-destructive actions via [mcporter](https://github.com/mcporter/mcporter).
```bash ```bash
# stdio — no running server needed (good for CI) # stdio — no running server needed (good for CI)
./tests/mcporter/test-tools.sh [--parallel] [--timeout-ms N] [--verbose] ./tests/mcporter/test-tools.sh [--parallel] [--timeout-ms N] [--verbose]
# HTTP — connects to a live server (most up-to-date coverage)
./tests/mcporter/test-actions.sh [MCP_URL] # default: http://localhost:6970/mcp
``` ```
Destructive actions are always skipped in both scripts. For safe testing strategies and exact mcporter commands per destructive action, see [`docs/DESTRUCTIVE_ACTIONS.md`](docs/DESTRUCTIVE_ACTIONS.md). Destructive actions are always skipped. For safe testing strategies and exact mcporter commands per destructive action, see [`docs/DESTRUCTIVE_ACTIONS.md`](docs/DESTRUCTIVE_ACTIONS.md).
### API Schema Docs Automation ### API Schema Docs Automation
```bash ```bash
@@ -443,24 +349,12 @@ uv run unraid-mcp-server
# Or run via module directly # Or run via module directly
uv run -m unraid_mcp.main uv run -m unraid_mcp.main
# Hot-reload dev server (restarts on file changes)
fastmcp run fastmcp.http.json --reload
# Run via named config files # Run via named config files
fastmcp run fastmcp.http.json # streamable-http on :6970
fastmcp run fastmcp.stdio.json # stdio transport fastmcp run fastmcp.stdio.json # stdio transport
``` ```
### Ad-hoc Tool Testing (fastmcp CLI) ### Ad-hoc Tool Testing (fastmcp CLI)
```bash ```bash
# Introspect the running server
fastmcp list http://localhost:6970/mcp
fastmcp list http://localhost:6970/mcp --input-schema
# Call a tool directly (HTTP)
fastmcp call http://localhost:6970/mcp unraid action=health subaction=check
fastmcp call http://localhost:6970/mcp unraid action=docker subaction=list
# Call without a running server (stdio config) # Call without a running server (stdio config)
fastmcp list fastmcp.stdio.json fastmcp list fastmcp.stdio.json
fastmcp call fastmcp.stdio.json unraid action=health subaction=check fastmcp call fastmcp.stdio.json unraid action=health subaction=check

View File

@@ -1,49 +0,0 @@
services:
unraid-mcp:
build:
context: .
dockerfile: Dockerfile
container_name: unraid-mcp
restart: unless-stopped
read_only: true
cap_drop:
- ALL
tmpfs:
- /tmp:noexec,nosuid,size=64m
- /app/logs:noexec,nosuid,size=16m
- /app/.cache/logs:noexec,nosuid,size=8m
ports:
# HostPort:ContainerPort (maps to UNRAID_MCP_PORT inside the container, default 6970)
# Change the host port (left side) if 6970 is already in use on your host
- "${UNRAID_MCP_PORT:-6970}:${UNRAID_MCP_PORT:-6970}"
env_file:
- path: ${HOME}/.unraid-mcp/.env
required: false # Don't fail if file missing; environment: block below takes over
environment:
# Core API Configuration (Required)
# Sourced from ~/.unraid-mcp/.env via env_file above (if present),
# or set these directly here. The :? syntax fails fast if unset.
- UNRAID_API_URL=${UNRAID_API_URL:?UNRAID_API_URL is required}
- UNRAID_API_KEY=${UNRAID_API_KEY:?UNRAID_API_KEY is required}
# MCP Server Settings
- UNRAID_MCP_PORT=${UNRAID_MCP_PORT:-6970}
- UNRAID_MCP_HOST=${UNRAID_MCP_HOST:-0.0.0.0}
- UNRAID_MCP_TRANSPORT=${UNRAID_MCP_TRANSPORT:-streamable-http}
# SSL Configuration
- UNRAID_VERIFY_SSL=${UNRAID_VERIFY_SSL:-true}
# Logging Configuration
- UNRAID_MCP_LOG_LEVEL=${UNRAID_MCP_LOG_LEVEL:-INFO}
- UNRAID_MCP_LOG_FILE=${UNRAID_MCP_LOG_FILE:-unraid-mcp.log}
# Real-time Subscription Configuration
- UNRAID_AUTO_START_SUBSCRIPTIONS=${UNRAID_AUTO_START_SUBSCRIPTIONS:-true}
- UNRAID_MAX_RECONNECT_ATTEMPTS=${UNRAID_MAX_RECONNECT_ATTEMPTS:-10}
# Optional: Custom log file path for subscription auto-start diagnostics
- UNRAID_AUTOSTART_LOG_PATH=${UNRAID_AUTOSTART_LOG_PATH}
# Optional: If you want to mount a specific directory for logs (ensure UNRAID_MCP_LOG_FILE points within this mount)
# volumes:
# - ./logs:/app/logs # Example: maps ./logs on host to /app/logs in container

View File

@@ -1,188 +0,0 @@
# Authentication Setup Guide
This document covers both Google OAuth 2.0 and API key bearer token authentication for the Unraid MCP HTTP server. It explains how to protect the server using FastMCP's built-in `GoogleProvider` for OAuth, or a static bearer token for headless/machine access.
---
## Overview
By default the MCP server is **open** — any client on the network can call tools. Setting three environment variables enables Google OAuth 2.0 authentication: clients must complete a Google login flow before the server will execute any tool.
OAuth state (issued tokens, refresh tokens) is persisted to an encrypted file store at `~/.local/share/fastmcp/oauth-proxy/`, so sessions survive server restarts when `UNRAID_MCP_JWT_SIGNING_KEY` is set.
> **Transport requirement**: OAuth only works with HTTP transports (`streamable-http` or `sse`). It has no effect on `stdio` — the server logs a warning if you configure both.
---
## Prerequisites
- Google account with access to [Google Cloud Console](https://console.cloud.google.com/)
- MCP server reachable at a known URL from your browser (LAN IP, Tailscale IP, or public domain)
- `UNRAID_MCP_TRANSPORT=streamable-http` (the default)
---
## Step 1: Create a Google OAuth Client
1. Open [Google Cloud Console](https://console.cloud.google.com/) → **APIs & Services****Credentials**
2. Click **Create Credentials****OAuth 2.0 Client ID**
3. Application type: **Web application**
4. Name: anything (e.g. `Unraid MCP`)
5. **Authorized redirect URIs** — add exactly:
```
http://<your-server-ip>:6970/auth/callback
```
Replace `<your-server-ip>` with the IP/hostname your browser uses to reach the MCP server (e.g. `10.1.0.2`, `100.x.x.x` for Tailscale, or a domain name).
6. Click **Create** — copy the **Client ID** and **Client Secret**
---
## Step 2: Configure Environment Variables
Add these to `~/.unraid-mcp/.env` (the canonical credential file for all runtimes):
```bash
# Google OAuth (optional — enables authentication)
GOOGLE_CLIENT_ID=your-client-id.apps.googleusercontent.com
GOOGLE_CLIENT_SECRET=GOCSPX-your-client-secret
# Public base URL of this MCP server (must match the redirect URI above)
UNRAID_MCP_BASE_URL=http://10.1.0.2:6970
# Stable JWT signing key — prevents token invalidation on server restart
# Generate one: python3 -c "import secrets; print(secrets.token_hex(32))"
UNRAID_MCP_JWT_SIGNING_KEY=your-64-char-hex-string
```
**All four variables at once** (copy-paste template):
```bash
cat >> ~/.unraid-mcp/.env <<'EOF'
# Google OAuth
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=
UNRAID_MCP_BASE_URL=http://10.1.0.2:6970
UNRAID_MCP_JWT_SIGNING_KEY=
EOF
```
Then fill in the blanks.
---
## Step 3: Generate a Stable JWT Signing Key
Without `UNRAID_MCP_JWT_SIGNING_KEY`, FastMCP derives a key on startup. Any server restart invalidates all existing tokens and forces every client to re-authenticate.
Generate a stable key once:
```bash
python3 -c "import secrets; print(secrets.token_hex(32))"
```
Paste the output into `UNRAID_MCP_JWT_SIGNING_KEY`. This value never needs to change unless you intentionally want to invalidate all sessions.
---
## Step 4: Restart the Server
```bash
# Docker Compose
docker compose restart unraid-mcp
# Direct / uv
uv run unraid-mcp-server
```
On startup you should see:
```
INFO [SERVER] Google OAuth enabled — base_url=http://10.1.0.2:6970, redirect_uri=http://10.1.0.2:6970/auth/callback
```
---
## How Authentication Works
1. An MCP client connects to `http://<server>:6970/mcp`
2. The server responds with a `401 Unauthorized` and an OAuth authorization URL
3. The client opens the URL in a browser; the user logs in with Google
4. Google redirects to `<UNRAID_MCP_BASE_URL>/auth/callback` with an authorization code
5. FastMCP exchanges the code for tokens, issues a signed JWT, and returns it to the client
6. The client includes the JWT in subsequent requests — the server validates it without hitting Google again
7. Tokens persist to `~/.local/share/fastmcp/oauth-proxy/` — sessions survive server restarts
---
## Environment Variable Reference
| Variable | Required | Default | Description |
|----------|----------|---------|-------------|
| `GOOGLE_CLIENT_ID` | For OAuth | `""` | OAuth 2.0 Client ID from Google Cloud Console |
| `GOOGLE_CLIENT_SECRET` | For OAuth | `""` | OAuth 2.0 Client Secret from Google Cloud Console |
| `UNRAID_MCP_BASE_URL` | For OAuth | `""` | Public base URL of this server — must match the authorized redirect URI |
| `UNRAID_MCP_JWT_SIGNING_KEY` | Recommended | auto-derived | Stable 32+ char secret for JWT signing — prevents token invalidation on restart |
OAuth is activated only when **all three** of `GOOGLE_CLIENT_ID`, `GOOGLE_CLIENT_SECRET`, and `UNRAID_MCP_BASE_URL` are non-empty. Omit any one to run without authentication.
---
## Disabling OAuth
Remove (or empty) `GOOGLE_CLIENT_ID` from `~/.unraid-mcp/.env` and restart. The server reverts to unauthenticated mode and logs:
```
WARNING [SERVER] No authentication configured — MCP server is open to all clients on the network.
```
---
## Troubleshooting
**`redirect_uri_mismatch` from Google**
The redirect URI in Google Cloud Console must exactly match `<UNRAID_MCP_BASE_URL>/auth/callback` — same scheme, host, port, and path. Trailing slashes matter.
**Tokens invalidated after restart**
Set `UNRAID_MCP_JWT_SIGNING_KEY` to a stable secret (see Step 3). Without it, FastMCP generates a new key on every start.
**`stdio` transport warning**
OAuth requires an HTTP transport. Set `UNRAID_MCP_TRANSPORT=streamable-http` (the default) or `sse`.
**Client cannot reach the callback URL**
`UNRAID_MCP_BASE_URL` must be the address your browser uses to reach the server — not `localhost` or `0.0.0.0`. Use the LAN IP, Tailscale IP, or a domain name.
**OAuth configured but server not starting**
Check `logs/unraid-mcp.log` or `docker compose logs unraid-mcp` for startup errors.
---
## API Key Authentication (Alternative / Combined)
For machine-to-machine access (scripts, CI, other agents) without a browser-based OAuth flow, set `UNRAID_MCP_API_KEY`:
```bash
# In ~/.unraid-mcp/.env
UNRAID_MCP_API_KEY=your-secret-token
```
Clients present it as a standard bearer token:
```
Authorization: Bearer your-secret-token
```
**Combining with Google OAuth**: set both `GOOGLE_CLIENT_ID` and `UNRAID_MCP_API_KEY`. The server activates `MultiAuth` and accepts either method — Google OAuth for interactive clients, API key for headless clients.
**Reusing the Unraid API key**: you can set `UNRAID_MCP_API_KEY` to the same value as `UNRAID_API_KEY` for simplicity. The two vars are kept separate so each concern has its own name.
**Standalone API key** (no Google OAuth): set only `UNRAID_MCP_API_KEY`. The server validates bearer tokens directly with no OAuth redirect flow.
---
## Security Notes
- OAuth protects the MCP HTTP interface — the Unraid GraphQL API itself still uses `UNRAID_API_KEY`
- OAuth state files at `~/.local/share/fastmcp/oauth-proxy/` should be on a private filesystem; do not expose them
- Restrict Google OAuth to specific accounts via the Google Cloud Console **OAuth consent screen** → **Test users** if you don't want to publish the app
- `UNRAID_MCP_JWT_SIGNING_KEY` is a credential — store it in `~/.unraid-mcp/.env` (mode 600), never in source control

View File

@@ -1,14 +1,14 @@
# Destructive Actions # Destructive Actions
**Last Updated:** 2026-03-16 **Last Updated:** 2026-03-24
**Total destructive actions:** 12 across 8 domains (single `unraid` tool) **Total destructive actions:** 12 across 8 domains (single `unraid` tool)
All destructive actions require `confirm=True` at the call site. There is no additional environment variable gate — `confirm` is the sole guard. All destructive actions require `confirm=True` at the call site. There is no additional environment variable gate — `confirm` is the sole guard.
> **mcporter commands below** use `$MCP_URL` (default: `http://localhost:6970/mcp`). Run `test-actions.sh` for automated non-destructive coverage; destructive actions are always skipped there and tested manually per the strategies below. > **mcporter commands below** use stdio transport. Run `test-tools.sh` for automated non-destructive coverage; destructive actions are always skipped there and tested manually per the strategies below.
> >
> **Calling convention (v1.0.0+):** All operations use the single `unraid` tool with `action` (domain) + `subaction` (operation). For example: > **Calling convention (v1.0.0+):** All operations use the single `unraid` tool with `action` (domain) + `subaction` (operation). For example:
> `mcporter call --http-url "$MCP_URL" --tool unraid --args '{"action":"docker","subaction":"list"}'` > `mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid --args '{"action":"docker","subaction":"list"}'`
--- ---
@@ -26,7 +26,7 @@ Stopping the array unmounts all shares and can interrupt running containers and
```bash ```bash
# Prerequisite: array must already be stopped; use a disk you intend to remove # Prerequisite: array must already be stopped; use a disk you intend to remove
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"array","subaction":"remove_disk","disk_id":"<DISK_ID>","confirm":true}' --output json --args '{"action":"array","subaction":"remove_disk","disk_id":"<DISK_ID>","confirm":true}' --output json
``` ```
@@ -36,11 +36,11 @@ mcporter call --http-url "$MCP_URL" --tool unraid \
```bash ```bash
# Discover disk IDs # Discover disk IDs
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"disk","subaction":"disks"}' --output json --args '{"action":"disk","subaction":"disks"}' --output json
# Clear stats for a specific disk # Clear stats for a specific disk
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"array","subaction":"clear_disk_stats","disk_id":"<DISK_ID>","confirm":true}' --output json --args '{"action":"array","subaction":"clear_disk_stats","disk_id":"<DISK_ID>","confirm":true}' --output json
``` ```
@@ -54,15 +54,15 @@ mcporter call --http-url "$MCP_URL" --tool unraid \
# Prerequisite: create a minimal Alpine test VM in Unraid VM manager # Prerequisite: create a minimal Alpine test VM in Unraid VM manager
# (Alpine ISO, 512MB RAM, no persistent disk, name contains "mcp-test") # (Alpine ISO, 512MB RAM, no persistent disk, name contains "mcp-test")
VID=$(mcporter call --http-url "$MCP_URL" --tool unraid \ VID=$(mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"vm","subaction":"list"}' --output json \ --args '{"action":"vm","subaction":"list"}' --output json \
| python3 -c "import json,sys; vms=json.load(sys.stdin).get('vms',[]); print(next(v.get('uuid',v.get('id','')) for v in vms if 'mcp-test' in v.get('name','')))") | python3 -c "import json,sys; vms=json.load(sys.stdin).get('vms',[]); print(next(v.get('uuid',v.get('id','')) for v in vms if 'mcp-test' in v.get('name','')))")
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args "{\"action\":\"vm\",\"subaction\":\"force_stop\",\"vm_id\":\"$VID\",\"confirm\":true}" --output json --args "{\"action\":\"vm\",\"subaction\":\"force_stop\",\"vm_id\":\"$VID\",\"confirm\":true}" --output json
# Verify: VM state should return to stopped # Verify: VM state should return to stopped
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args "{\"action\":\"vm\",\"subaction\":\"details\",\"vm_id\":\"$VID\"}" --output json --args "{\"action\":\"vm\",\"subaction\":\"details\",\"vm_id\":\"$VID\"}" --output json
``` ```
@@ -72,11 +72,11 @@ mcporter call --http-url "$MCP_URL" --tool unraid \
```bash ```bash
# Same minimal Alpine test VM as above # Same minimal Alpine test VM as above
VID=$(mcporter call --http-url "$MCP_URL" --tool unraid \ VID=$(mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"vm","subaction":"list"}' --output json \ --args '{"action":"vm","subaction":"list"}' --output json \
| python3 -c "import json,sys; vms=json.load(sys.stdin).get('vms',[]); print(next(v.get('uuid',v.get('id','')) for v in vms if 'mcp-test' in v.get('name','')))") | python3 -c "import json,sys; vms=json.load(sys.stdin).get('vms',[]); print(next(v.get('uuid',v.get('id','')) for v in vms if 'mcp-test' in v.get('name','')))")
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args "{\"action\":\"vm\",\"subaction\":\"reset\",\"vm_id\":\"$VID\",\"confirm\":true}" --output json --args "{\"action\":\"vm\",\"subaction\":\"reset\",\"vm_id\":\"$VID\",\"confirm\":true}" --output json
``` ```
@@ -89,9 +89,9 @@ mcporter call --http-url "$MCP_URL" --tool unraid \
```bash ```bash
# 1. Create a test notification, then list to get the real stored ID (create response # 1. Create a test notification, then list to get the real stored ID (create response
# ID is ULID-based; stored filename uses a unix timestamp, so IDs differ) # ID is ULID-based; stored filename uses a unix timestamp, so IDs differ)
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"notification","subaction":"create","title":"mcp-test-delete","subject":"safe to delete","description":"MCP destructive action test","importance":"INFO"}' --output json --args '{"action":"notification","subaction":"create","title":"mcp-test-delete","subject":"safe to delete","description":"MCP destructive action test","importance":"INFO"}' --output json
NID=$(mcporter call --http-url "$MCP_URL" --tool unraid \ NID=$(mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"notification","subaction":"list","notification_type":"UNREAD"}' --output json \ --args '{"action":"notification","subaction":"list","notification_type":"UNREAD"}' --output json \
| python3 -c " | python3 -c "
import json,sys import json,sys
@@ -100,11 +100,11 @@ matches=[n['id'] for n in reversed(notifs) if n.get('title')=='mcp-test-delete']
print(matches[0] if matches else '')") print(matches[0] if matches else '')")
# 2. Delete it (notification_type required) # 2. Delete it (notification_type required)
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args "{\"action\":\"notification\",\"subaction\":\"delete\",\"notification_id\":\"$NID\",\"notification_type\":\"UNREAD\",\"confirm\":true}" --output json --args "{\"action\":\"notification\",\"subaction\":\"delete\",\"notification_id\":\"$NID\",\"notification_type\":\"UNREAD\",\"confirm\":true}" --output json
# 3. Verify # 3. Verify
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"notification","subaction":"list"}' --output json | python3 -c \ --args '{"action":"notification","subaction":"list"}' --output json | python3 -c \
"import json,sys; ns=[n for n in json.load(sys.stdin).get('notifications',[]) if 'mcp-test' in n.get('title','')]; print('clean' if not ns else ns)" "import json,sys; ns=[n for n in json.load(sys.stdin).get('notifications',[]) if 'mcp-test' in n.get('title','')]; print('clean' if not ns else ns)"
``` ```
@@ -115,21 +115,21 @@ mcporter call --http-url "$MCP_URL" --tool unraid \
```bash ```bash
# 1. Create and archive a test notification # 1. Create and archive a test notification
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"notification","subaction":"create","title":"mcp-test-archive-wipe","subject":"archive me","description":"safe to delete","importance":"INFO"}' --output json --args '{"action":"notification","subaction":"create","title":"mcp-test-archive-wipe","subject":"archive me","description":"safe to delete","importance":"INFO"}' --output json
AID=$(mcporter call --http-url "$MCP_URL" --tool unraid \ AID=$(mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"notification","subaction":"list","notification_type":"UNREAD"}' --output json \ --args '{"action":"notification","subaction":"list","notification_type":"UNREAD"}' --output json \
| python3 -c " | python3 -c "
import json,sys import json,sys
notifs=json.load(sys.stdin).get('notifications',[]) notifs=json.load(sys.stdin).get('notifications',[])
matches=[n['id'] for n in reversed(notifs) if n.get('title')=='mcp-test-archive-wipe'] matches=[n['id'] for n in reversed(notifs) if n.get('title')=='mcp-test-archive-wipe']
print(matches[0] if matches else '')") print(matches[0] if matches else '')")
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args "{\"action\":\"notification\",\"subaction\":\"archive\",\"notification_id\":\"$AID\"}" --output json --args "{\"action\":\"notification\",\"subaction\":\"archive\",\"notification_id\":\"$AID\"}" --output json
# 2. Wipe all archived # 2. Wipe all archived
# NOTE: this deletes ALL archived notifications, not just the test one # NOTE: this deletes ALL archived notifications, not just the test one
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"notification","subaction":"delete_archived","confirm":true}' --output json --args '{"action":"notification","subaction":"delete_archived","confirm":true}' --output json
``` ```
@@ -144,15 +144,15 @@ mcporter call --http-url "$MCP_URL" --tool unraid \
```bash ```bash
# 1. Create a throwaway local remote (points to /tmp — no real data) # 1. Create a throwaway local remote (points to /tmp — no real data)
# Parameters: name (str), provider_type (str), config_data (dict) # Parameters: name (str), provider_type (str), config_data (dict)
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"rclone","subaction":"create_remote","name":"mcp-test-remote","provider_type":"local","config_data":{"root":"/tmp"}}' --output json --args '{"action":"rclone","subaction":"create_remote","name":"mcp-test-remote","provider_type":"local","config_data":{"root":"/tmp"}}' --output json
# 2. Delete it # 2. Delete it
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"rclone","subaction":"delete_remote","name":"mcp-test-remote","confirm":true}' --output json --args '{"action":"rclone","subaction":"delete_remote","name":"mcp-test-remote","confirm":true}' --output json
# 3. Verify # 3. Verify
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"rclone","subaction":"list_remotes"}' --output json | python3 -c \ --args '{"action":"rclone","subaction":"list_remotes"}' --output json | python3 -c \
"import json,sys; remotes=json.load(sys.stdin).get('remotes',[]); print('clean' if 'mcp-test-remote' not in remotes else 'FOUND — cleanup failed')" "import json,sys; remotes=json.load(sys.stdin).get('remotes',[]); print('clean' if 'mcp-test-remote' not in remotes else 'FOUND — cleanup failed')"
``` ```
@@ -167,16 +167,16 @@ mcporter call --http-url "$MCP_URL" --tool unraid \
```bash ```bash
# 1. Create a test key (names cannot contain hyphens; ID is at key.id) # 1. Create a test key (names cannot contain hyphens; ID is at key.id)
KID=$(mcporter call --http-url "$MCP_URL" --tool unraid \ KID=$(mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"key","subaction":"create","name":"mcp test key","roles":["VIEWER"]}' --output json \ --args '{"action":"key","subaction":"create","name":"mcp test key","roles":["VIEWER"]}' --output json \
| python3 -c "import json,sys; print(json.load(sys.stdin).get('key',{}).get('id',''))") | python3 -c "import json,sys; print(json.load(sys.stdin).get('key',{}).get('id',''))")
# 2. Delete it # 2. Delete it
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args "{\"action\":\"key\",\"subaction\":\"delete\",\"key_id\":\"$KID\",\"confirm\":true}" --output json --args "{\"action\":\"key\",\"subaction\":\"delete\",\"key_id\":\"$KID\",\"confirm\":true}" --output json
# 3. Verify # 3. Verify
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"key","subaction":"list"}' --output json | python3 -c \ --args '{"action":"key","subaction":"list"}' --output json | python3 -c \
"import json,sys; ks=json.load(sys.stdin).get('keys',[]); print('clean' if not any('mcp test key' in k.get('name','') for k in ks) else 'FOUND — cleanup failed')" "import json,sys; ks=json.load(sys.stdin).get('keys',[]); print('clean' if not any('mcp test key' in k.get('name','') for k in ks) else 'FOUND — cleanup failed')"
``` ```
@@ -191,7 +191,7 @@ mcporter call --http-url "$MCP_URL" --tool unraid \
# Prerequisite: create a dedicated test remote pointing away from real backup destination # Prerequisite: create a dedicated test remote pointing away from real backup destination
# (use rclone create_remote first, or configure mcp-test-remote manually) # (use rclone create_remote first, or configure mcp-test-remote manually)
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"disk","subaction":"flash_backup","remote_name":"mcp-test-remote","source_path":"/boot","destination_path":"/flash-backup-test","confirm":true}' --output json --args '{"action":"disk","subaction":"flash_backup","remote_name":"mcp-test-remote","source_path":"/boot","destination_path":"/flash-backup-test","confirm":true}' --output json
``` ```
@@ -217,7 +217,7 @@ Removing a plugin cannot be undone without a full re-install. Test via `tests/sa
```bash ```bash
# If live testing is necessary (intentional removal only): # If live testing is necessary (intentional removal only):
mcporter call --http-url "$MCP_URL" --tool unraid \ mcporter call --stdio-cmd "uv run unraid-mcp-server" --tool unraid \
--args '{"action":"plugin","subaction":"remove","names":["<plugin-name>"],"confirm":true}' --output json --args '{"action":"plugin","subaction":"remove","names":["<plugin-name>"],"confirm":true}' --output json
``` ```

View File

@@ -11,37 +11,77 @@ The marketplace catalog that lists all available plugins in this repository.
**Contents:** **Contents:**
- Marketplace metadata (name, version, owner, repository) - Marketplace metadata (name, version, owner, repository)
- Plugin catalog with the "unraid" skill - Plugin catalog with the "unraid" plugin
- Categories and tags for discoverability - Categories and tags for discoverability
### 2. Plugin Manifest (`.claude-plugin/plugin.json`) ### 2. Plugin Manifest (`.claude-plugin/plugin.json`)
The individual plugin configuration for the Unraid skill. The individual plugin configuration for the Unraid MCP server.
**Location:** `.claude-plugin/plugin.json` **Location:** `.claude-plugin/plugin.json`
**Contents:** **Contents:**
- Plugin name, version, author - Plugin name (`unraid`), version (`1.1.2`), author
- Repository and homepage links - Repository and homepage links
- Plugin-specific metadata - `mcpServers` block that configures the server to run via `uv run unraid-mcp-server` in stdio mode
### 3. Documentation ### 3. Validation Script
- `.claude-plugin/README.md` - Marketplace installation guide - `scripts/validate-marketplace.sh` — Automated validation of marketplace structure
- Updated root `README.md` with plugin installation section
### 4. Validation Script ## MCP Tools Exposed
- `scripts/validate-marketplace.sh` - Automated validation of marketplace structure
The plugin registers **3 MCP tools**:
| Tool | Purpose |
|------|---------|
| `unraid` | Primary tool — `action` (domain) + `subaction` (operation) routing, ~108 subactions across 15 domains |
| `diagnose_subscriptions` | Inspect WebSocket subscription connection states and errors |
| `test_subscription_query` | Test a specific GraphQL subscription query (allowlisted fields only) |
### Calling Convention
All Unraid operations go through the single `unraid` tool:
```
unraid(action="docker", subaction="list")
unraid(action="system", subaction="overview")
unraid(action="array", subaction="parity_status")
unraid(action="vm", subaction="list")
unraid(action="live", subaction="cpu")
```
### Domains (action=)
| action | example subactions |
|--------|--------------------|
| `system` | overview, array, network, metrics, services, ups_devices |
| `health` | check, test_connection, diagnose, setup |
| `array` | parity_status, parity_start, start_array, add_disk |
| `disk` | shares, disks, disk_details, logs |
| `docker` | list, details, start, stop, restart |
| `vm` | list, details, start, stop, pause, resume |
| `notification` | overview, list, create, archive, archive_all |
| `key` | list, get, create, update, delete |
| `plugin` | list, add, remove |
| `rclone` | list_remotes, config_form, create_remote |
| `setting` | update, configure_ups |
| `customization` | theme, set_theme, sso_enabled |
| `oidc` | providers, configuration, validate_session |
| `user` | me |
| `live` | cpu, memory, array_state, log_tail, notification_feed |
Destructive subactions (e.g. `stop_array`, `force_stop`, `delete`) require `confirm=True`.
## Installation Methods ## Installation Methods
### Method 1: GitHub Distribution (Recommended for Users) ### Method 1: GitHub Distribution (Recommended for Users)
Once you push this to GitHub, users can install via: Once pushed to GitHub, users install via:
```bash ```bash
# Add your marketplace # Add the marketplace
/plugin marketplace add jmagar/unraid-mcp /plugin marketplace add jmagar/unraid-mcp
# Install the Unraid skill # Install the Unraid plugin
/plugin install unraid @unraid-mcp /plugin install unraid @unraid-mcp
``` ```
@@ -59,7 +99,7 @@ For testing locally before publishing:
### Method 3: Direct URL ### Method 3: Direct URL
Users can also install from a specific commit or branch: Install from a specific branch or commit:
```bash ```bash
# From specific branch # From specific branch
@@ -75,14 +115,14 @@ Users can also install from a specific commit or branch:
unraid-mcp/ unraid-mcp/
├── .claude-plugin/ # Plugin manifest + marketplace manifest ├── .claude-plugin/ # Plugin manifest + marketplace manifest
│ ├── plugin.json # Plugin configuration (name, version, mcpServers) │ ├── plugin.json # Plugin configuration (name, version, mcpServers)
── marketplace.json # Marketplace catalog ── marketplace.json # Marketplace catalog
│ └── README.md # Marketplace installation guide ├── unraid_mcp/ # Python package (the actual MCP server)
├── skills/unraid/ # Skill documentation and helpers │ ├── main.py # Entry point
│ ├── SKILL.md # Skill documentation │ ├── server.py # FastMCP server registration
│ ├── README.md # Plugin documentation │ ├── tools/unraid.py # Consolidated tool (all 3 tools registered here)
│ ├── examples/ # Example scripts │ ├── config/ # Settings management
│ ├── scripts/ # Helper scripts │ ├── core/ # GraphQL client, exceptions, shared types
│ └── references/ # API reference docs │ └── subscriptions/ # Real-time WebSocket subscription manager
└── scripts/ └── scripts/
└── validate-marketplace.sh # Validation tool └── validate-marketplace.sh # Validation tool
``` ```
@@ -90,15 +130,15 @@ unraid-mcp/
## Marketplace Metadata ## Marketplace Metadata
### Categories ### Categories
- `infrastructure` - Server management and monitoring tools - `infrastructure` Server management and monitoring tools
### Tags ### Tags
- `unraid` - Unraid-specific functionality - `unraid` Unraid-specific functionality
- `monitoring` - System monitoring capabilities - `monitoring` System monitoring capabilities
- `homelab` - Homelab automation - `homelab` Homelab automation
- `graphql` - GraphQL API integration - `graphql` GraphQL API integration
- `docker` - Docker container management - `docker` Docker container management
- `virtualization` - VM management - `virtualization` VM management
## Publishing Checklist ## Publishing Checklist
@@ -109,10 +149,10 @@ Before publishing to GitHub:
./scripts/validate-marketplace.sh ./scripts/validate-marketplace.sh
``` ```
2. **Update Version Numbers** 2. **Update Version Numbers** (must be in sync)
- Bump version in `.claude-plugin/marketplace.json` - `pyproject.toml` → `version = "X.Y.Z"` under `[project]`
- Bump version in `.claude-plugin/plugin.json` - `.claude-plugin/plugin.json` → `"version": "X.Y.Z"`
- Update version in `README.md` if needed - `.claude-plugin/marketplace.json` → `"version"` in both `metadata` and `plugins[]`
3. **Test Locally** 3. **Test Locally**
```bash ```bash
@@ -123,33 +163,38 @@ Before publishing to GitHub:
4. **Commit and Push** 4. **Commit and Push**
```bash ```bash
git add .claude-plugin/ git add .claude-plugin/
git commit -m "feat: add Claude Code marketplace configuration" git commit -m "chore: bump marketplace to vX.Y.Z"
git push origin main git push origin main
``` ```
5. **Create Release Tag** (Optional) 5. **Create Release Tag**
```bash ```bash
git tag -a v1.0.0 -m "Release v1.0.0" git tag -a vX.Y.Z -m "Release vX.Y.Z"
git push origin v1.0.0 git push origin vX.Y.Z
``` ```
## User Experience ## User Experience
After installation, users will: After installation, users can:
1. **See the skill in their skill list** 1. **Invoke Unraid operations directly in Claude Code**
```bash ```
/skill list unraid(action="system", subaction="overview")
unraid(action="docker", subaction="list")
unraid(action="health", subaction="check")
``` ```
2. **Access Unraid functionality directly** 2. **Use the credential setup tool on first run**
- Claude Code will automatically detect when to invoke the skill ```
- Users can explicitly invoke with `/unraid` unraid(action="health", subaction="setup")
```
This triggers elicitation to collect and persist credentials to `~/.unraid-mcp/.env`.
3. **Have access to all helper scripts** 3. **Monitor live data via subscriptions**
- Example scripts in `examples/` ```
- Utility scripts in `scripts/` unraid(action="live", subaction="cpu")
- API reference in `references/` unraid(action="live", subaction="log_tail")
```
## Maintenance ## Maintenance
@@ -157,31 +202,21 @@ After installation, users will:
To release a new version: To release a new version:
1. Make changes to the plugin 1. Make changes to the plugin code
2. Update version in `.claude-plugin/plugin.json` 2. Update version in `pyproject.toml`, `.claude-plugin/plugin.json`, and `.claude-plugin/marketplace.json`
3. Update marketplace catalog in `.claude-plugin/marketplace.json` 3. Run validation: `./scripts/validate-marketplace.sh`
4. Run validation: `./scripts/validate-marketplace.sh` 4. Commit and push
5. Commit and push
Users with the plugin installed will see the update available and can upgrade with: Users with the plugin installed will see the update available and can upgrade:
```bash ```bash
/plugin update unraid /plugin update unraid
``` ```
### Adding More Plugins
To add additional plugins to this marketplace:
1. Create new plugin directory: `skills/new-plugin/`
2. Add plugin manifest: `skills/new-plugin/.claude-plugin/plugin.json`
3. Update marketplace catalog: add entry to `.plugins[]` array in `.claude-plugin/marketplace.json`
4. Validate: `./scripts/validate-marketplace.sh`
## Support ## Support
- **Repository:** https://github.com/jmagar/unraid-mcp - **Repository:** https://github.com/jmagar/unraid-mcp
- **Issues:** https://github.com/jmagar/unraid-mcp/issues - **Issues:** https://github.com/jmagar/unraid-mcp/issues
- **Documentation:** See `.claude-plugin/README.md` and `skills/unraid/README.md` - **Destructive Actions:** `docs/DESTRUCTIVE_ACTIONS.md`
## Validation ## Validation
@@ -198,5 +233,3 @@ This checks:
- Plugin structure - Plugin structure
- Source path accuracy - Source path accuracy
- Documentation completeness - Documentation completeness
All 17 checks must pass before publishing.

View File

@@ -2,6 +2,26 @@
This guide covers how to publish `unraid-mcp` to PyPI so it can be installed via `uvx` or `pip` from anywhere. This guide covers how to publish `unraid-mcp` to PyPI so it can be installed via `uvx` or `pip` from anywhere.
## Package Overview
**PyPI package name:** `unraid-mcp`
**Entry point binary:** `unraid-mcp-server` (also aliased as `unraid-mcp`)
**Current version:** `1.1.2`
The package ships a FastMCP server exposing **3 MCP tools**:
- `unraid` — primary tool with `action` + `subaction` routing (~107 subactions, 15 domains)
- `diagnose_subscriptions` — WebSocket subscription diagnostics
- `test_subscription_query` — test individual GraphQL subscription queries
Tool call convention: `unraid(action="docker", subaction="list")`
### Version Sync Requirement
When bumping the version, **all three files must be updated together**:
- `pyproject.toml``version = "X.Y.Z"` under `[project]`
- `.claude-plugin/plugin.json``"version": "X.Y.Z"`
- `.claude-plugin/marketplace.json``"version"` in both `metadata` and `plugins[]`
## Prerequisites ## Prerequisites
1. **PyPI Account**: Create accounts on both: 1. **PyPI Account**: Create accounts on both:
@@ -40,7 +60,7 @@ Before publishing, update the version in `pyproject.toml`:
```toml ```toml
[project] [project]
version = "1.0.0" # Follow semantic versioning: MAJOR.MINOR.PATCH version = "1.1.2" # Follow semantic versioning: MAJOR.MINOR.PATCH
``` ```
**Semantic Versioning Guide:** **Semantic Versioning Guide:**
@@ -82,8 +102,8 @@ uv run python -m build
``` ```
This creates: This creates:
- `dist/unraid_mcp-VERSION-py3-none-any.whl` (wheel) - `dist/unraid_mcp-1.1.2-py3-none-any.whl` (wheel)
- `dist/unraid_mcp-VERSION.tar.gz` (source distribution) - `dist/unraid_mcp-1.1.2.tar.gz` (source distribution)
### 4. Validate the Package ### 4. Validate the Package
@@ -156,7 +176,7 @@ UNRAID_API_URL=https://your-server uvx unraid-mcp-server
**Benefits of uvx:** **Benefits of uvx:**
- No installation required - No installation required
- Automatic virtual environment management - Automatic virtual environment management
- Always uses the latest version (or specify version: `uvx unraid-mcp-server@1.0.0`) - Always uses the latest version (or specify version: `uvx unraid-mcp-server@1.1.2`)
- Clean execution environment - Clean execution environment
## Automation with GitHub Actions (Future) ## Automation with GitHub Actions (Future)

View File

@@ -1,23 +0,0 @@
{
"$schema": "https://gofastmcp.com/public/schemas/fastmcp.json/v1.json",
"source": {
"path": "unraid_mcp/server.py",
"entrypoint": "mcp"
},
"environment": {
"type": "uv",
"python": "3.12",
"editable": ["."]
},
"deployment": {
"transport": "http",
"host": "0.0.0.0",
"port": 6970,
"path": "/mcp",
"log_level": "INFO",
"env": {
"UNRAID_API_URL": "${UNRAID_API_URL}",
"UNRAID_API_KEY": "${UNRAID_API_KEY}"
}
}
}

View File

@@ -1,20 +0,0 @@
{
"$schema": "https://gofastmcp.com/public/schemas/fastmcp.json/v1.json",
"source": {
"path": "unraid_mcp/server.py",
"entrypoint": "mcp"
},
"environment": {
"type": "uv",
"python": "3.12",
"editable": ["."]
},
"deployment": {
"transport": "stdio",
"log_level": "INFO",
"env": {
"UNRAID_API_URL": "${UNRAID_API_URL}",
"UNRAID_API_KEY": "${UNRAID_API_KEY}"
}
}
}

View File

@@ -72,7 +72,7 @@ fi
# Check version sync between pyproject.toml and plugin.json # Check version sync between pyproject.toml and plugin.json
echo "Checking version sync..." echo "Checking version sync..."
TOML_VER=$(grep '^version = ' pyproject.toml | sed 's/version = "//;s/"//') TOML_VER=$(grep -m1 '^version = ' pyproject.toml | sed 's/version = "//;s/"//')
PLUGIN_VER=$(python3 -c "import json; print(json.load(open('.claude-plugin/plugin.json'))['version'])" 2>/dev/null || echo "ERROR_READING") PLUGIN_VER=$(python3 -c "import json; print(json.load(open('.claude-plugin/plugin.json'))['version'])" 2>/dev/null || echo "ERROR_READING")
if [ "$TOML_VER" != "$PLUGIN_VER" ]; then if [ "$TOML_VER" != "$PLUGIN_VER" ]; then
echo -e "${RED}FAIL: Version mismatch — pyproject.toml=$TOML_VER, plugin.json=$PLUGIN_VER${NC}" echo -e "${RED}FAIL: Version mismatch — pyproject.toml=$TOML_VER, plugin.json=$PLUGIN_VER${NC}"

View File

@@ -6,6 +6,12 @@ from unittest.mock import AsyncMock, patch
import pytest import pytest
from fastmcp import FastMCP from fastmcp import FastMCP
from hypothesis import settings
from hypothesis.database import DirectoryBasedExampleDatabase
# Configure hypothesis to use the .cache directory for its database
# (keeps the default ./.hypothesis directory out of the repo root;
# presumably the path is created on demand by hypothesis — confirm).
settings.register_profile("default", database=DirectoryBasedExampleDatabase(".cache/.hypothesis"))
settings.load_profile("default")
@pytest.fixture @pytest.fixture

View File

@@ -8,6 +8,7 @@ to verify the full request pipeline.
""" """
import json import json
from collections.abc import Callable
from typing import Any from typing import Any
from unittest.mock import patch from unittest.mock import patch
@@ -264,7 +265,7 @@ class TestInfoToolRequests:
"""Verify unraid system tool constructs correct GraphQL queries.""" """Verify unraid system tool constructs correct GraphQL queries."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -367,7 +368,7 @@ class TestDockerToolRequests:
"""Verify unraid docker tool constructs correct requests.""" """Verify unraid docker tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -535,7 +536,7 @@ class TestVMToolRequests:
"""Verify unraid vm tool constructs correct requests.""" """Verify unraid vm tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -625,7 +626,7 @@ class TestArrayToolRequests:
"""Verify unraid array tool constructs correct requests.""" """Verify unraid array tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -701,7 +702,7 @@ class TestStorageToolRequests:
"""Verify unraid disk tool constructs correct requests.""" """Verify unraid disk tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -799,7 +800,7 @@ class TestNotificationsToolRequests:
"""Verify unraid notification tool constructs correct requests.""" """Verify unraid notification tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -932,7 +933,7 @@ class TestRCloneToolRequests:
"""Verify unraid rclone tool constructs correct requests.""" """Verify unraid rclone tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -1029,7 +1030,7 @@ class TestUsersToolRequests:
"""Verify unraid user tool constructs correct requests.""" """Verify unraid user tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -1062,7 +1063,7 @@ class TestKeysToolRequests:
"""Verify unraid key tool constructs correct requests.""" """Verify unraid key tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock
@@ -1157,7 +1158,7 @@ class TestHealthToolRequests:
"""Verify unraid health tool constructs correct requests.""" """Verify unraid health tool constructs correct requests."""
@staticmethod @staticmethod
def _get_tool(): def _get_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
@respx.mock @respx.mock

View File

@@ -4,17 +4,7 @@ Live integration smoke-tests for the unraid-mcp server, exercising real API call
--- ---
## Two Scripts, Two Transports ## `test-tools.sh` — stdio, no running server needed
| | `test-tools.sh` | `test-actions.sh` |
|-|-----------------|-------------------|
| **Transport** | stdio | HTTP |
| **Server required** | No — launched ad-hoc per call | Yes — must be running at `$MCP_URL` |
| **Flags** | `--timeout-ms N`, `--parallel`, `--verbose` | positional `[MCP_URL]` |
| **Coverage** | 10 tools (read-only actions only) | 11 tools (all non-destructive actions) |
| **Use case** | CI / offline local check | Live server smoke-test |
### `test-tools.sh` — stdio, no running server needed
```bash ```bash
./tests/mcporter/test-tools.sh # sequential, 25s timeout ./tests/mcporter/test-tools.sh # sequential, 25s timeout
@@ -25,19 +15,9 @@ Live integration smoke-tests for the unraid-mcp server, exercising real API call
Launches `uv run unraid-mcp-server` in stdio mode for each tool call. Requires `mcporter`, `uv`, and `python3` in `PATH`. Good for CI pipelines — no persistent server process needed. Launches `uv run unraid-mcp-server` in stdio mode for each tool call. Requires `mcporter`, `uv`, and `python3` in `PATH`. Good for CI pipelines — no persistent server process needed.
### `test-actions.sh` — HTTP, requires a live server
```bash
./tests/mcporter/test-actions.sh # default: http://localhost:6970/mcp
./tests/mcporter/test-actions.sh http://10.1.0.2:6970/mcp # explicit URL
UNRAID_MCP_URL=http://10.1.0.2:6970/mcp ./tests/mcporter/test-actions.sh
```
Connects to an already-running streamable-http server. Covers all read-only actions across 10 tools (`unraid_settings` is all-mutations and skipped; all destructive mutations are explicitly skipped).
--- ---
## What `test-actions.sh` Tests ## What `test-tools.sh` Tests
### Phase 1 — Param-free reads ### Phase 1 — Param-free reads
@@ -137,15 +117,10 @@ curl -LsSf https://astral.sh/uv/install.sh | sh
# python3 — used for inline JSON extraction # python3 — used for inline JSON extraction
python3 --version # 3.12+ python3 --version # 3.12+
# Running server (for test-actions.sh only)
docker compose up -d
# or
uv run unraid-mcp-server
``` ```
--- ---
## Cleanup ## Cleanup
`test-actions.sh` connects to an existing server and leaves it running; it creates no temporary files. `test-tools.sh` spawns stdio server subprocesses per call — they exit when mcporter finishes each invocation — and may write a timestamped log file under `${TMPDIR:-/tmp}`. Neither script leaves background processes. `test-tools.sh` spawns stdio server subprocesses per call — they exit when mcporter finishes each invocation — and may write a timestamped log file under `${TMPDIR:-/tmp}`. It does not leave background processes.

View File

@@ -1,407 +0,0 @@
#!/usr/bin/env bash
# test-actions.sh — Test all non-destructive Unraid MCP actions via mcporter
#
# Usage:
#   ./scripts/test-actions.sh [MCP_URL]
#
# Default MCP_URL: http://localhost:6970/mcp
# Skips: destructive (confirm=True required), state-changing mutations,
#        and actions requiring IDs not yet discovered.
#
# Phase 1: param-free reads
# Phase 2: ID-discovered reads (container, network, disk, vm, key, log)

# Fail fast: -e aborts on errors, -u errors on unset variables, and
# pipefail propagates failures through pipelines.
set -euo pipefail

# Target server URL: positional argument wins, then $UNRAID_MCP_URL,
# then the localhost default.
MCP_URL="${1:-${UNRAID_MCP_URL:-http://localhost:6970/mcp}}"

# ── colours ──────────────────────────────────────────────────────────────────
RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'
CYAN='\033[0;36m'; BOLD='\033[1m'; NC='\033[0m'

# Global tallies and the list of failed labels, updated by the helpers below.
PASS=0; FAIL=0; SKIP=0
declare -a FAILED_TESTS=()

# ── helpers ───────────────────────────────────────────────────────────────────
mcall() {
  # Invoke a single MCP tool call over HTTP via mcporter.
  #   $1 — tool name
  #   $2 — JSON argument string
  # stderr is folded into stdout so callers capture one stream.
  local tool_name="$1"
  local json_args="$2"
  mcporter call --http-url "$MCP_URL" --allow-http \
    --tool "$tool_name" --args "$json_args" --output json 2>&1
}
_check_output() {
  # Decide whether a tool call succeeded.
  # Returns 0 if output looks like a successful JSON response, 1 otherwise.
  #   $1 — captured stdout+stderr of the call
  #   $2 — the call's exit code
  local output="$1" exit_code="$2"
  # A non-zero exit from mcporter is always a failure.
  [[ $exit_code -ne 0 ]] && return 1
  # Inspect the JSON body for error markers. NOTE(review): output that is
  # not valid JSON falls into the except branch and counts as success —
  # presumably deliberate (best-effort check); confirm against mcporter output.
  echo "$output" | python3 -c "
import json, sys
try:
    d = json.load(sys.stdin)
    if isinstance(d, dict) and (d.get('isError') or d.get('error') or 'ToolError' in str(d)):
        sys.exit(1)
except Exception:
    pass
sys.exit(0)
" 2>/dev/null
}
run_test() {
  # run_test <label> <tool> <json-args>
  # Execute one tool call and print a one-line PASS/FAIL status.
  # The JSON body is deliberately not echoed, to keep output readable.
  local label="$1" tool="$2" args="$3"
  printf " %-60s" "$label"

  local rc=0 body
  body=$(mcall "$tool" "$args" 2>&1) || rc=$?

  # Guard clause: handle the failure path first, success falls through.
  if ! _check_output "$body" "$rc"; then
    echo -e "${RED}FAIL${NC}"
    ((FAIL++)) || true
    FAILED_TESTS+=("$label")
    # Show the first 3 lines of error detail, indented under the label.
    echo "$body" | head -3 | sed 's/^/ /'
    return 0
  fi

  echo -e "${GREEN}PASS${NC}"
  ((PASS++)) || true
}
run_test_capture() {
  # Like run_test but echoes raw JSON to stdout for ID extraction by caller.
  # Status lines go to stderr so the caller's $() captures only clean JSON.
  #   $1 — label, $2 — tool name, $3 — JSON argument string
  local label="$1" tool="$2" args="$3"
  local output exit_code=0
  printf " %-60s" "$label" >&2
  output=$(mcall "$tool" "$args" 2>&1) || exit_code=$?
  if _check_output "$output" "$exit_code"; then
    echo -e "${GREEN}PASS${NC}" >&2
    # '|| true' keeps set -e from aborting when the pre-increment value is 0
    # (arithmetic expansion returns non-zero for a zero result).
    ((PASS++)) || true
  else
    echo -e "${RED}FAIL${NC}" >&2
    ((FAIL++)) || true
    FAILED_TESTS+=("$label")
    # First three lines of the failure detail, indented, on stderr.
    echo "$output" | head -3 | sed 's/^/ /' >&2
  fi
  echo "$output"  # pure JSON → captured by caller's $()
}
extract_id() {
  # Extract an ID from JSON output using a Python snippet.
  # Usage: ID=$(extract_id "$JSON_OUTPUT" "$LABEL" 'python expression')
  # If JSON parsing fails (malformed mcporter output), record a FAIL and
  # emit an empty string. If parsing succeeds but finds no items, emit
  # empty (caller skips). Callers decide via [[ -n "$ID" ]], never via the
  # return status.
  local json_input="$1" label="$2" py_code="$3"
  local result="" py_exit=0 parse_err=""
  # Capture stdout (the extracted ID) and stderr (any parse errors) separately.
  # A temp file is needed because $() can only capture one stream.
  local errfile
  errfile=$(mktemp)
  result=$(echo "$json_input" | python3 -c "$py_code" 2>"$errfile") || py_exit=$?
  parse_err=$(<"$errfile")
  rm -f "$errfile"
  if [[ $py_exit -ne 0 ]]; then
    printf " %-60s${RED}FAIL${NC} (JSON parse error)\n" "$label" >&2
    [[ -n "$parse_err" ]] && echo "$parse_err" | head -2 | sed 's/^/ /' >&2
    ((FAIL++)) || true
    FAILED_TESTS+=("$label (JSON parse)")
    echo ""
    # BUGFIX: was 'return 1'. The callers assign via VAR=$(extract_id ...),
    # and under 'set -e' a non-zero status from the command substitution
    # aborts the whole script at the assignment — defeating the intent of
    # recording the FAIL and continuing. Return 0; the empty ID already
    # signals "nothing found" to the caller's [[ -n ... ]] guard.
    return 0
  fi
  echo "$result"
}
skip_test() {
  # Record a deliberately skipped action together with the reason.
  #   $1 — label, $2 — human-readable skip reason
  local name="$1" why="$2"
  printf " %-60s${YELLOW}SKIP${NC} (%s)\n" "$name" "$why"
  # Plain arithmetic assignment: always exits 0, so no '|| true' needed.
  SKIP=$((SKIP + 1))
}
section() {
  # Print a coloured banner introducing the next group of tests.
  # printf '%b' expands the \033 escapes held in the colour variables,
  # matching echo -e behaviour.
  printf '\n'
  printf '%b\n' "${CYAN}${BOLD}━━━ $1 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
}
# ── connectivity check ────────────────────────────────────────────────────────
# Print the suite banner, then verify the server answers at all before
# spending time on individual tool calls.
echo ""
echo -e "${BOLD}Unraid MCP Non-Destructive Action Test Suite${NC}"
echo -e "Server: ${CYAN}$MCP_URL${NC}"
echo ""

printf "Checking connectivity... "
# Use -s (silent) without -f: a 4xx/406 means the MCP server is up and
# responding correctly to a plain GET — only "connection refused" is fatal.
# Capture curl's exit code directly — don't mask failures with a fallback.
HTTP_CODE=""
curl_exit=0
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" --max-time 5 "$MCP_URL" 2>/dev/null) || curl_exit=$?
if [[ $curl_exit -ne 0 ]]; then
  echo -e "${RED}UNREACHABLE${NC} (curl exit code: $curl_exit)"
  echo "Start the server first: docker compose up -d OR uv run unraid-mcp-server"
  exit 1
fi
# Any HTTP status code at all proves the endpoint answered.
echo -e "${GREEN}OK${NC} (HTTP $HTTP_CODE)"
# ═══════════════════════════════════════════════════════════════════════════════
# PHASE 1 — Param-free read actions
# ═══════════════════════════════════════════════════════════════════════════════
# Every run_test here needs no discovered IDs. run_test_capture calls keep the
# raw JSON in a variable so Phase 2 can mine it for container/disk/VM/key IDs.

section "unraid_info (19 query actions)"
run_test "info: overview" unraid_info '{"action":"overview"}'
run_test "info: array" unraid_info '{"action":"array"}'
run_test "info: network" unraid_info '{"action":"network"}'
run_test "info: registration" unraid_info '{"action":"registration"}'
run_test "info: connect" unraid_info '{"action":"connect"}'
run_test "info: variables" unraid_info '{"action":"variables"}'
run_test "info: metrics" unraid_info '{"action":"metrics"}'
run_test "info: services" unraid_info '{"action":"services"}'
run_test "info: display" unraid_info '{"action":"display"}'
run_test "info: config" unraid_info '{"action":"config"}'
run_test "info: online" unraid_info '{"action":"online"}'
run_test "info: owner" unraid_info '{"action":"owner"}'
run_test "info: settings" unraid_info '{"action":"settings"}'
run_test "info: server" unraid_info '{"action":"server"}'
run_test "info: servers" unraid_info '{"action":"servers"}'
run_test "info: flash" unraid_info '{"action":"flash"}'
run_test "info: ups_devices" unraid_info '{"action":"ups_devices"}'
run_test "info: ups_device" unraid_info '{"action":"ups_device"}'
run_test "info: ups_config" unraid_info '{"action":"ups_config"}'
skip_test "info: update_server" "mutation — state-changing"
skip_test "info: update_ssh" "mutation — state-changing"

section "unraid_array"
run_test "array: parity_status" unraid_array '{"action":"parity_status"}'
skip_test "array: parity_start" "mutation — starts parity check"
skip_test "array: parity_pause" "mutation — pauses parity check"
skip_test "array: parity_resume" "mutation — resumes parity check"
skip_test "array: parity_cancel" "mutation — cancels parity check"

section "unraid_storage (param-free reads)"
# Captured: disk list and log file list feed Phase 2 ID discovery.
STORAGE_DISKS=$(run_test_capture "storage: disks" unraid_storage '{"action":"disks"}')
run_test "storage: shares" unraid_storage '{"action":"shares"}'
run_test "storage: unassigned" unraid_storage '{"action":"unassigned"}'
LOG_FILES=$(run_test_capture "storage: log_files" unraid_storage '{"action":"log_files"}')
skip_test "storage: flash_backup" "destructive (confirm=True required)"

section "unraid_docker (param-free reads)"
# Captured: container and network lists feed Phase 2 ID discovery.
DOCKER_LIST=$(run_test_capture "docker: list" unraid_docker '{"action":"list"}')
DOCKER_NETS=$(run_test_capture "docker: networks" unraid_docker '{"action":"networks"}')
run_test "docker: port_conflicts" unraid_docker '{"action":"port_conflicts"}'
run_test "docker: check_updates" unraid_docker '{"action":"check_updates"}'
run_test "docker: sync_templates" unraid_docker '{"action":"sync_templates"}'
run_test "docker: refresh_digests" unraid_docker '{"action":"refresh_digests"}'
skip_test "docker: start" "mutation — changes container state"
skip_test "docker: stop" "mutation — changes container state"
skip_test "docker: restart" "mutation — changes container state"
skip_test "docker: pause" "mutation — changes container state"
skip_test "docker: unpause" "mutation — changes container state"
skip_test "docker: update" "mutation — updates container image"
skip_test "docker: remove" "destructive (confirm=True required)"
skip_test "docker: update_all" "destructive (confirm=True required)"
skip_test "docker: create_folder" "mutation — changes organizer state"
skip_test "docker: set_folder_children" "mutation — changes organizer state"
skip_test "docker: delete_entries" "destructive (confirm=True required)"
skip_test "docker: move_to_folder" "mutation — changes organizer state"
skip_test "docker: move_to_position" "mutation — changes organizer state"
skip_test "docker: rename_folder" "mutation — changes organizer state"
skip_test "docker: create_folder_with_items" "mutation — changes organizer state"
skip_test "docker: update_view_prefs" "mutation — changes organizer state"
skip_test "docker: reset_template_mappings" "destructive (confirm=True required)"

section "unraid_vm (param-free reads)"
# Captured: VM list feeds Phase 2 ID discovery.
VM_LIST=$(run_test_capture "vm: list" unraid_vm '{"action":"list"}')
skip_test "vm: start" "mutation — changes VM state"
skip_test "vm: stop" "mutation — changes VM state"
skip_test "vm: pause" "mutation — changes VM state"
skip_test "vm: resume" "mutation — changes VM state"
skip_test "vm: reboot" "mutation — changes VM state"
skip_test "vm: force_stop" "destructive (confirm=True required)"
skip_test "vm: reset" "destructive (confirm=True required)"

section "unraid_notifications"
run_test "notifications: overview" unraid_notifications '{"action":"overview"}'
run_test "notifications: list" unraid_notifications '{"action":"list"}'
run_test "notifications: warnings" unraid_notifications '{"action":"warnings"}'
run_test "notifications: recalculate" unraid_notifications '{"action":"recalculate"}'
skip_test "notifications: create" "mutation — creates notification"
skip_test "notifications: create_unique" "mutation — creates notification"
skip_test "notifications: archive" "mutation — changes notification state"
skip_test "notifications: unread" "mutation — changes notification state"
skip_test "notifications: archive_all" "mutation — changes notification state"
skip_test "notifications: archive_many" "mutation — changes notification state"
skip_test "notifications: unarchive_many" "mutation — changes notification state"
skip_test "notifications: unarchive_all" "mutation — changes notification state"
skip_test "notifications: delete" "destructive (confirm=True required)"
skip_test "notifications: delete_archived" "destructive (confirm=True required)"

section "unraid_rclone"
run_test "rclone: list_remotes" unraid_rclone '{"action":"list_remotes"}'
run_test "rclone: config_form" unraid_rclone '{"action":"config_form"}'
skip_test "rclone: create_remote" "mutation — creates remote"
skip_test "rclone: delete_remote" "destructive (confirm=True required)"

section "unraid_users"
run_test "users: me" unraid_users '{"action":"me"}'

section "unraid_keys"
# Captured: key list feeds Phase 2 ID discovery.
KEYS_LIST=$(run_test_capture "keys: list" unraid_keys '{"action":"list"}')
skip_test "keys: create" "mutation — creates API key"
skip_test "keys: update" "mutation — modifies API key"
skip_test "keys: delete" "destructive (confirm=True required)"

section "unraid_health"
run_test "health: check" unraid_health '{"action":"check"}'
run_test "health: test_connection" unraid_health '{"action":"test_connection"}'
run_test "health: diagnose" unraid_health '{"action":"diagnose"}'

section "unraid_settings (all mutations — skipped)"
skip_test "settings: update" "mutation — modifies settings"
skip_test "settings: update_temperature" "mutation — modifies settings"
skip_test "settings: update_time" "mutation — modifies settings"
skip_test "settings: configure_ups" "destructive (confirm=True required)"
skip_test "settings: update_api" "mutation — modifies settings"
skip_test "settings: connect_sign_in" "mutation — authentication action"
skip_test "settings: connect_sign_out" "mutation — authentication action"
skip_test "settings: setup_remote_access" "destructive (confirm=True required)"
skip_test "settings: enable_dynamic_remote_access" "destructive (confirm=True required)"
# ═══════════════════════════════════════════════════════════════════════════════
# PHASE 2 — ID-discovered read actions
# ═══════════════════════════════════════════════════════════════════════════════
# Each stanza mines an ID out of JSON captured in Phase 1, then runs the
# detail/read action for that ID; when nothing is found, the action is skipped.

section "Phase 2: ID-discovered reads"

# ── docker container ID ───────────────────────────────────────────────────────
CONTAINER_ID=$(extract_id "$DOCKER_LIST" "docker: extract container ID" "
import json, sys
d = json.load(sys.stdin)
containers = d.get('containers') or d.get('data', {}).get('containers') or []
if isinstance(containers, list) and containers:
    c = containers[0]
    cid = c.get('id') or c.get('names', [''])[0].lstrip('/')
    if cid:
        print(cid)
")

if [[ -n "$CONTAINER_ID" ]]; then
  run_test "docker: details (id=$CONTAINER_ID)" \
    unraid_docker "{\"action\":\"details\",\"container_id\":\"$CONTAINER_ID\"}"
  run_test "docker: logs (id=$CONTAINER_ID)" \
    unraid_docker "{\"action\":\"logs\",\"container_id\":\"$CONTAINER_ID\",\"tail_lines\":20}"
else
  skip_test "docker: details" "no containers found to discover ID"
  skip_test "docker: logs" "no containers found to discover ID"
fi

# ── docker network ID ─────────────────────────────────────────────────────────
NETWORK_ID=$(extract_id "$DOCKER_NETS" "docker: extract network ID" "
import json, sys
d = json.load(sys.stdin)
nets = d.get('networks') or d.get('data', {}).get('networks') or []
if isinstance(nets, list) and nets:
    nid = nets[0].get('id') or nets[0].get('Id')
    if nid:
        print(nid)
")

if [[ -n "$NETWORK_ID" ]]; then
  run_test "docker: network_details (id=$NETWORK_ID)" \
    unraid_docker "{\"action\":\"network_details\",\"network_id\":\"$NETWORK_ID\"}"
else
  skip_test "docker: network_details" "no networks found to discover ID"
fi

# ── disk ID ───────────────────────────────────────────────────────────────────
DISK_ID=$(extract_id "$STORAGE_DISKS" "storage: extract disk ID" "
import json, sys
d = json.load(sys.stdin)
disks = d.get('disks') or d.get('data', {}).get('disks') or []
if isinstance(disks, list) and disks:
    did = disks[0].get('id') or disks[0].get('device')
    if did:
        print(did)
")

if [[ -n "$DISK_ID" ]]; then
  run_test "storage: disk_details (id=$DISK_ID)" \
    unraid_storage "{\"action\":\"disk_details\",\"disk_id\":\"$DISK_ID\"}"
else
  skip_test "storage: disk_details" "no disks found to discover ID"
fi

# ── log path ──────────────────────────────────────────────────────────────────
LOG_PATH=$(extract_id "$LOG_FILES" "storage: extract log path" "
import json, sys
d = json.load(sys.stdin)
files = d.get('log_files') or d.get('files') or d.get('data', {}).get('log_files') or []
if isinstance(files, list) and files:
    p = files[0].get('path') or (files[0] if isinstance(files[0], str) else None)
    if p:
        print(p)
")

if [[ -n "$LOG_PATH" ]]; then
  run_test "storage: logs (path=$LOG_PATH)" \
    unraid_storage "{\"action\":\"logs\",\"log_path\":\"$LOG_PATH\",\"tail_lines\":20}"
else
  skip_test "storage: logs" "no log files found to discover path"
fi

# ── VM ID ─────────────────────────────────────────────────────────────────────
VM_ID=$(extract_id "$VM_LIST" "vm: extract VM ID" "
import json, sys
d = json.load(sys.stdin)
vms = d.get('vms') or d.get('data', {}).get('vms') or []
if isinstance(vms, list) and vms:
    vid = vms[0].get('uuid') or vms[0].get('id') or vms[0].get('name')
    if vid:
        print(vid)
")

if [[ -n "$VM_ID" ]]; then
  run_test "vm: details (id=$VM_ID)" \
    unraid_vm "{\"action\":\"details\",\"vm_id\":\"$VM_ID\"}"
else
  skip_test "vm: details" "no VMs found to discover ID"
fi

# ── API key ID ────────────────────────────────────────────────────────────────
KEY_ID=$(extract_id "$KEYS_LIST" "keys: extract key ID" "
import json, sys
d = json.load(sys.stdin)
keys = d.get('keys') or d.get('apiKeys') or d.get('data', {}).get('keys') or []
if isinstance(keys, list) and keys:
    kid = keys[0].get('id')
    if kid:
        print(kid)
")

if [[ -n "$KEY_ID" ]]; then
  run_test "keys: get (id=$KEY_ID)" \
    unraid_keys "{\"action\":\"get\",\"key_id\":\"$KEY_ID\"}"
else
  skip_test "keys: get" "no API keys found to discover ID"
fi
# ═══════════════════════════════════════════════════════════════════════════════
# SUMMARY
# ═══════════════════════════════════════════════════════════════════════════════

TOTAL=$((PASS + FAIL + SKIP))

echo ""
echo -e "${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${BOLD}Results: ${GREEN}${PASS} passed${NC} ${RED}${FAIL} failed${NC} ${YELLOW}${SKIP} skipped${NC} (${TOTAL} total)"

# Replay the failed labels so they are easy to spot under the tallies.
# (The array-length check runs inside an 'if' condition, so set -e is safe.)
if (( ${#FAILED_TESTS[@]} > 0 )); then
  echo ""
  echo -e "${RED}${BOLD}Failed tests:${NC}"
  for failed_label in "${FAILED_TESTS[@]}"; do
    echo -e " ${RED}${NC} $failed_label"
  done
fi

echo ""

# Exit non-zero iff at least one test failed.
if [[ $FAIL -eq 0 ]]; then
  exit 0
fi
exit 1

View File

@@ -149,8 +149,8 @@ test_notifications_delete() {
# Create the notification # Create the notification
local create_raw local create_raw
create_raw="$(mcall unraid_notifications \ create_raw="$(mcall unraid \
'{"action":"create","title":"mcp-test-delete","subject":"MCP destructive test","description":"Safe to delete","importance":"INFO"}')" '{"action":"notification","subaction":"create","title":"mcp-test-delete","subject":"MCP destructive test","description":"Safe to delete","importance":"INFO"}')"
local create_ok local create_ok
create_ok="$(python3 -c "import json,sys; d=json.loads('''${create_raw}'''); print(d.get('success', False))" 2>/dev/null)" create_ok="$(python3 -c "import json,sys; d=json.loads('''${create_raw}'''); print(d.get('success', False))" 2>/dev/null)"
if [[ "${create_ok}" != "True" ]]; then if [[ "${create_ok}" != "True" ]]; then
@@ -161,7 +161,7 @@ test_notifications_delete() {
# The create response ID doesn't match the stored filename — list and find by title. # The create response ID doesn't match the stored filename — list and find by title.
# Use the LAST match so a stale notification with the same title is bypassed. # Use the LAST match so a stale notification with the same title is bypassed.
local list_raw nid local list_raw nid
list_raw="$(mcall unraid_notifications '{"action":"list","notification_type":"UNREAD"}')" list_raw="$(mcall unraid '{"action":"notification","subaction":"list","notification_type":"UNREAD"}')"
nid="$(python3 -c " nid="$(python3 -c "
import json,sys import json,sys
d = json.loads('''${list_raw}''') d = json.loads('''${list_raw}''')
@@ -177,8 +177,8 @@ print(matches[0] if matches else '')
fi fi
local del_raw local del_raw
del_raw="$(mcall unraid_notifications \ del_raw="$(mcall unraid \
"{\"action\":\"delete\",\"notification_id\":\"${nid}\",\"notification_type\":\"UNREAD\",\"confirm\":true}")" "{\"action\":\"notification\",\"subaction\":\"delete\",\"notification_id\":\"${nid}\",\"notification_type\":\"UNREAD\",\"confirm\":true}")"
# success=true OR deleteNotification key present (raw GraphQL response) both indicate success # success=true OR deleteNotification key present (raw GraphQL response) both indicate success
local success local success
success="$(python3 -c " success="$(python3 -c "
@@ -190,7 +190,7 @@ print(ok)
if [[ "${success}" != "True" ]]; then if [[ "${success}" != "True" ]]; then
# Leak: notification created but not deleted — archive it so it doesn't clutter the feed # Leak: notification created but not deleted — archive it so it doesn't clutter the feed
mcall unraid_notifications "{\"action\":\"archive\",\"notification_id\":\"${nid}\"}" &>/dev/null || true mcall unraid "{\"action\":\"notification\",\"subaction\":\"archive\",\"notification_id\":\"${nid}\"}" &>/dev/null || true
fail_test "${label}" "delete did not return success=true: ${del_raw} (notification archived as fallback cleanup)" fail_test "${label}" "delete did not return success=true: ${del_raw} (notification archived as fallback cleanup)"
return return
fi fi
@@ -201,7 +201,7 @@ print(ok)
if ${CONFIRM}; then if ${CONFIRM}; then
test_notifications_delete test_notifications_delete
else else
dry_run "notifications: delete [create notification → mcall unraid_notifications delete]" dry_run "notifications: delete [create notification → mcall unraid action=notification subaction=delete]"
fi fi
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@@ -227,7 +227,7 @@ test_keys_delete() {
# Guard: abort if test key already exists (don't delete a real key) # Guard: abort if test key already exists (don't delete a real key)
# Note: API key names cannot contain hyphens — use "mcp test key" # Note: API key names cannot contain hyphens — use "mcp test key"
local existing_keys local existing_keys
existing_keys="$(mcall unraid_keys '{"action":"list"}')" existing_keys="$(mcall unraid '{"action":"key","subaction":"list"}')"
if python3 -c " if python3 -c "
import json,sys import json,sys
d = json.loads('''${existing_keys}''') d = json.loads('''${existing_keys}''')
@@ -241,8 +241,8 @@ sys.exit(1 if any(k.get('name') == 'mcp test key' for k in keys) else 0)
fi fi
local create_raw local create_raw
create_raw="$(mcall unraid_keys \ create_raw="$(mcall unraid \
'{"action":"create","name":"mcp test key","roles":["VIEWER"]}')" '{"action":"key","subaction":"create","name":"mcp test key","roles":["VIEWER"]}')"
local kid local kid
kid="$(python3 -c "import json,sys; d=json.loads('''${create_raw}'''); print(d.get('key',{}).get('id',''))" 2>/dev/null)" kid="$(python3 -c "import json,sys; d=json.loads('''${create_raw}'''); print(d.get('key',{}).get('id',''))" 2>/dev/null)"
@@ -252,20 +252,20 @@ sys.exit(1 if any(k.get('name') == 'mcp test key' for k in keys) else 0)
fi fi
local del_raw local del_raw
del_raw="$(mcall unraid_keys "{\"action\":\"delete\",\"key_id\":\"${kid}\",\"confirm\":true}")" del_raw="$(mcall unraid "{\"action\":\"key\",\"subaction\":\"delete\",\"key_id\":\"${kid}\",\"confirm\":true}")"
local success local success
success="$(python3 -c "import json,sys; d=json.loads('''${del_raw}'''); print(d.get('success', False))" 2>/dev/null)" success="$(python3 -c "import json,sys; d=json.loads('''${del_raw}'''); print(d.get('success', False))" 2>/dev/null)"
if [[ "${success}" != "True" ]]; then if [[ "${success}" != "True" ]]; then
# Cleanup: attempt to delete the leaked key so future runs are not blocked # Cleanup: attempt to delete the leaked key so future runs are not blocked
mcall unraid_keys "{\"action\":\"delete\",\"key_id\":\"${kid}\",\"confirm\":true}" &>/dev/null || true mcall unraid "{\"action\":\"key\",\"subaction\":\"delete\",\"key_id\":\"${kid}\",\"confirm\":true}" &>/dev/null || true
fail_test "${label}" "delete did not return success=true: ${del_raw} (key delete re-attempted as fallback cleanup)" fail_test "${label}" "delete did not return success=true: ${del_raw} (key delete re-attempted as fallback cleanup)"
return return
fi fi
# Verify gone # Verify gone
local list_raw local list_raw
list_raw="$(mcall unraid_keys '{"action":"list"}')" list_raw="$(mcall unraid '{"action":"key","subaction":"list"}')"
if python3 -c " if python3 -c "
import json,sys import json,sys
d = json.loads('''${list_raw}''') d = json.loads('''${list_raw}''')
@@ -281,7 +281,7 @@ sys.exit(0 if not any(k.get('id') == '${kid}' for k in keys) else 1)
if ${CONFIRM}; then if ${CONFIRM}; then
test_keys_delete test_keys_delete
else else
dry_run "keys: delete [create test key → mcall unraid_keys delete]" dry_run "keys: delete [create test key → mcall unraid action=key subaction=delete]"
fi fi
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -215,6 +215,7 @@ except Exception as e:
mcporter_call() { mcporter_call() {
local args_json="${1:?args_json required}" local args_json="${1:?args_json required}"
# Redirect stderr to the log file so startup warnings/logs don't pollute the JSON stdout.
mcporter call \ mcporter call \
--stdio "uv run unraid-mcp-server" \ --stdio "uv run unraid-mcp-server" \
--cwd "${PROJECT_DIR}" \ --cwd "${PROJECT_DIR}" \
@@ -223,7 +224,7 @@ mcporter_call() {
--args "${args_json}" \ --args "${args_json}" \
--timeout "${CALL_TIMEOUT_MS}" \ --timeout "${CALL_TIMEOUT_MS}" \
--output json \ --output json \
2>&1 2>>"${LOG_FILE}"
} }
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@@ -239,7 +240,7 @@ run_test() {
t0="$(date +%s%N)" t0="$(date +%s%N)"
local output local output
output="$(mcporter_call "${args}" 2>&1)" || true output="$(mcporter_call "${args}")" || true
local elapsed_ms local elapsed_ms
elapsed_ms="$(( ( $(date +%s%N) - t0 ) / 1000000 ))" elapsed_ms="$(( ( $(date +%s%N) - t0 ) / 1000000 ))"
@@ -659,7 +660,7 @@ suite_live() {
run_test "live: memory" '{"action":"live","subaction":"memory"}' run_test "live: memory" '{"action":"live","subaction":"memory"}'
run_test "live: cpu_telemetry" '{"action":"live","subaction":"cpu_telemetry"}' run_test "live: cpu_telemetry" '{"action":"live","subaction":"cpu_telemetry"}'
run_test "live: notifications_overview" '{"action":"live","subaction":"notifications_overview"}' run_test "live: notifications_overview" '{"action":"live","subaction":"notifications_overview"}'
run_test "live: log_tail" '{"action":"live","subaction":"log_tail"}' run_test "live: log_tail" '{"action":"live","subaction":"log_tail","path":"/var/log/syslog"}'
} }
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------

View File

@@ -1,155 +0,0 @@
"""Tests for ApiKeyVerifier and _build_auth() in server.py."""
import importlib
from unittest.mock import MagicMock, patch
import pytest
import unraid_mcp.server as srv
# ---------------------------------------------------------------------------
# ApiKeyVerifier unit tests
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
async def test_api_key_verifier_accepts_correct_key():
"""Returns AccessToken when the presented token matches the configured key."""
verifier = srv.ApiKeyVerifier("secret-key-abc123")
result = await verifier.verify_token("secret-key-abc123")
assert result is not None
assert result.client_id == "api-key-client"
assert result.token == "secret-key-abc123"
@pytest.mark.asyncio
async def test_api_key_verifier_rejects_wrong_key():
"""Returns None when the token does not match."""
verifier = srv.ApiKeyVerifier("secret-key-abc123")
result = await verifier.verify_token("wrong-key")
assert result is None
@pytest.mark.asyncio
async def test_api_key_verifier_rejects_empty_token():
"""Returns None for an empty string token."""
verifier = srv.ApiKeyVerifier("secret-key-abc123")
result = await verifier.verify_token("")
assert result is None
@pytest.mark.asyncio
async def test_api_key_verifier_empty_key_rejects_empty_token():
"""When initialised with empty key, even an empty token is rejected.
An empty UNRAID_MCP_API_KEY means auth is disabled — ApiKeyVerifier
should not be instantiated in that case. But if it is, it must not
grant access via an empty bearer token.
"""
verifier = srv.ApiKeyVerifier("")
result = await verifier.verify_token("")
assert result is None
# ---------------------------------------------------------------------------
# _build_auth() integration tests
# ---------------------------------------------------------------------------
def test_build_auth_returns_none_when_nothing_configured(monkeypatch):
"""Returns None when neither Google OAuth nor API key is set."""
monkeypatch.setenv("GOOGLE_CLIENT_ID", "")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "")
monkeypatch.setenv("UNRAID_MCP_API_KEY", "")
import unraid_mcp.config.settings as s
importlib.reload(s)
result = srv._build_auth()
assert result is None
def test_build_auth_returns_api_key_verifier_when_only_api_key_set(monkeypatch):
"""Returns ApiKeyVerifier when UNRAID_MCP_API_KEY is set but Google OAuth is not."""
monkeypatch.setenv("GOOGLE_CLIENT_ID", "")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "")
monkeypatch.setenv("UNRAID_MCP_API_KEY", "my-secret-api-key")
import unraid_mcp.config.settings as s
importlib.reload(s)
result = srv._build_auth()
assert isinstance(result, srv.ApiKeyVerifier)
def test_build_auth_returns_google_provider_when_only_oauth_set(monkeypatch):
"""Returns GoogleProvider when Google OAuth vars are set but no API key."""
monkeypatch.setenv("GOOGLE_CLIENT_ID", "test-id.apps.googleusercontent.com")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "GOCSPX-test-secret")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "http://10.1.0.2:6970")
monkeypatch.setenv("UNRAID_MCP_API_KEY", "")
monkeypatch.setenv("UNRAID_MCP_JWT_SIGNING_KEY", "x" * 32)
import unraid_mcp.config.settings as s
importlib.reload(s)
mock_provider = MagicMock()
with patch("unraid_mcp.server.GoogleProvider", return_value=mock_provider):
result = srv._build_auth()
assert result is mock_provider
def test_build_auth_returns_multi_auth_when_both_configured(monkeypatch):
"""Returns MultiAuth when both Google OAuth and UNRAID_MCP_API_KEY are set."""
from fastmcp.server.auth import MultiAuth
monkeypatch.setenv("GOOGLE_CLIENT_ID", "test-id.apps.googleusercontent.com")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "GOCSPX-test-secret")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "http://10.1.0.2:6970")
monkeypatch.setenv("UNRAID_MCP_API_KEY", "my-secret-api-key")
monkeypatch.setenv("UNRAID_MCP_JWT_SIGNING_KEY", "x" * 32)
import unraid_mcp.config.settings as s
importlib.reload(s)
mock_provider = MagicMock()
with patch("unraid_mcp.server.GoogleProvider", return_value=mock_provider):
result = srv._build_auth()
assert isinstance(result, MultiAuth)
# Server is the Google provider
assert result.server is mock_provider
# One additional verifier — the ApiKeyVerifier
assert len(result.verifiers) == 1
assert isinstance(result.verifiers[0], srv.ApiKeyVerifier)
def test_build_auth_multi_auth_api_key_verifier_uses_correct_key(monkeypatch):
"""The ApiKeyVerifier inside MultiAuth is seeded with the configured key."""
monkeypatch.setenv("GOOGLE_CLIENT_ID", "test-id.apps.googleusercontent.com")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "GOCSPX-test-secret")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "http://10.1.0.2:6970")
monkeypatch.setenv("UNRAID_MCP_API_KEY", "super-secret-token")
monkeypatch.setenv("UNRAID_MCP_JWT_SIGNING_KEY", "x" * 32)
import unraid_mcp.config.settings as s
importlib.reload(s)
with patch("unraid_mcp.server.GoogleProvider", return_value=MagicMock()):
result = srv._build_auth()
verifier = result.verifiers[0]
assert verifier._api_key == "super-secret-token"

View File

@@ -1,115 +0,0 @@
"""Tests for _build_google_auth() in server.py."""
import importlib
from unittest.mock import MagicMock, patch
from unraid_mcp.server import _build_google_auth
def test_build_google_auth_returns_none_when_unconfigured(monkeypatch):
"""Returns None when Google OAuth env vars are absent."""
# Use explicit empty values so dotenv reload cannot re-inject from ~/.unraid-mcp/.env.
monkeypatch.setenv("GOOGLE_CLIENT_ID", "")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "")
import unraid_mcp.config.settings as s
importlib.reload(s)
result = _build_google_auth()
assert result is None
def test_build_google_auth_returns_provider_when_configured(monkeypatch):
"""Returns GoogleProvider instance when all required vars are set."""
monkeypatch.setenv("GOOGLE_CLIENT_ID", "test-id.apps.googleusercontent.com")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "GOCSPX-test-secret")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "http://10.1.0.2:6970")
monkeypatch.setenv("UNRAID_MCP_JWT_SIGNING_KEY", "x" * 32)
import unraid_mcp.config.settings as s
importlib.reload(s)
mock_provider = MagicMock()
mock_provider_class = MagicMock(return_value=mock_provider)
with patch("unraid_mcp.server.GoogleProvider", mock_provider_class):
result = _build_google_auth()
assert result is mock_provider
mock_provider_class.assert_called_once_with(
client_id="test-id.apps.googleusercontent.com",
client_secret="GOCSPX-test-secret",
base_url="http://10.1.0.2:6970",
extra_authorize_params={"access_type": "online", "prompt": "consent"},
require_authorization_consent=False,
jwt_signing_key="x" * 32,
)
def test_build_google_auth_omits_jwt_key_when_empty(monkeypatch):
"""jwt_signing_key is omitted (not passed as empty string) when not set."""
monkeypatch.setenv("GOOGLE_CLIENT_ID", "test-id.apps.googleusercontent.com")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "GOCSPX-test-secret")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "http://10.1.0.2:6970")
# Use setenv("") not delenv so dotenv reload can't re-inject from ~/.unraid-mcp/.env
monkeypatch.setenv("UNRAID_MCP_JWT_SIGNING_KEY", "")
import unraid_mcp.config.settings as s
importlib.reload(s)
mock_provider_class = MagicMock(return_value=MagicMock())
with patch("unraid_mcp.server.GoogleProvider", mock_provider_class):
_build_google_auth()
call_kwargs = mock_provider_class.call_args.kwargs
assert "jwt_signing_key" not in call_kwargs
def test_build_google_auth_warns_on_stdio_transport(monkeypatch):
"""Logs a warning when Google auth is configured but transport is stdio."""
monkeypatch.setenv("GOOGLE_CLIENT_ID", "test-id.apps.googleusercontent.com")
monkeypatch.setenv("GOOGLE_CLIENT_SECRET", "GOCSPX-test-secret")
monkeypatch.setenv("UNRAID_MCP_BASE_URL", "http://10.1.0.2:6970")
monkeypatch.setenv("UNRAID_MCP_TRANSPORT", "stdio")
import unraid_mcp.config.settings as s
importlib.reload(s)
warning_messages: list[str] = []
with (
patch("unraid_mcp.server.GoogleProvider", MagicMock(return_value=MagicMock())),
patch("unraid_mcp.server.logger") as mock_logger,
):
mock_logger.warning.side_effect = lambda msg, *a, **kw: warning_messages.append(msg)
_build_google_auth()
assert any("stdio" in m.lower() for m in warning_messages)
def test_mcp_instance_has_no_auth_by_default():
"""The FastMCP mcp instance has no auth provider when Google vars are absent."""
import os
for var in ("GOOGLE_CLIENT_ID", "GOOGLE_CLIENT_SECRET", "UNRAID_MCP_BASE_URL"):
os.environ[var] = ""
import importlib
import unraid_mcp.config.settings as s
importlib.reload(s)
import unraid_mcp.server as srv
importlib.reload(srv)
# FastMCP stores auth on ._auth_provider or .auth
auth = getattr(srv.mcp, "_auth_provider", None) or getattr(srv.mcp, "auth", None)
assert auth is None

View File

@@ -1,91 +0,0 @@
"""Tests for Google OAuth settings loading."""
import importlib
from typing import Any
def _reload_settings(monkeypatch, overrides: dict) -> Any:
"""Reload settings module with given env vars set."""
for k, v in overrides.items():
monkeypatch.setenv(k, v)
import unraid_mcp.config.settings as mod
importlib.reload(mod)
return mod
def test_google_auth_defaults_to_empty(monkeypatch):
"""Google auth vars default to empty string when not set."""
# Use setenv("", "") rather than delenv so dotenv reload can't re-inject values
# from ~/.unraid-mcp/.env (load_dotenv won't override existing env vars).
mod = _reload_settings(
monkeypatch,
{
"GOOGLE_CLIENT_ID": "",
"GOOGLE_CLIENT_SECRET": "",
"UNRAID_MCP_BASE_URL": "",
"UNRAID_MCP_JWT_SIGNING_KEY": "",
},
)
assert mod.GOOGLE_CLIENT_ID == ""
assert mod.GOOGLE_CLIENT_SECRET == ""
assert mod.UNRAID_MCP_BASE_URL == ""
assert mod.UNRAID_MCP_JWT_SIGNING_KEY == ""
def test_google_auth_reads_env_vars(monkeypatch):
"""Google auth vars are read from environment."""
mod = _reload_settings(
monkeypatch,
{
"GOOGLE_CLIENT_ID": "test-client-id.apps.googleusercontent.com",
"GOOGLE_CLIENT_SECRET": "GOCSPX-test-secret",
"UNRAID_MCP_BASE_URL": "http://10.1.0.2:6970",
"UNRAID_MCP_JWT_SIGNING_KEY": "a" * 32,
},
)
assert mod.GOOGLE_CLIENT_ID == "test-client-id.apps.googleusercontent.com"
assert mod.GOOGLE_CLIENT_SECRET == "GOCSPX-test-secret"
assert mod.UNRAID_MCP_BASE_URL == "http://10.1.0.2:6970"
assert mod.UNRAID_MCP_JWT_SIGNING_KEY == "a" * 32
def test_google_auth_enabled_requires_both_vars(monkeypatch):
"""is_google_auth_configured() requires both client_id and client_secret."""
# Only client_id — not configured
mod = _reload_settings(
monkeypatch,
{
"GOOGLE_CLIENT_ID": "test-id",
"GOOGLE_CLIENT_SECRET": "",
"UNRAID_MCP_BASE_URL": "http://10.1.0.2:6970",
},
)
monkeypatch.delenv("GOOGLE_CLIENT_SECRET", raising=False)
importlib.reload(mod)
assert not mod.is_google_auth_configured()
# Both set — configured
mod2 = _reload_settings(
monkeypatch,
{
"GOOGLE_CLIENT_ID": "test-id",
"GOOGLE_CLIENT_SECRET": "test-secret",
"UNRAID_MCP_BASE_URL": "http://10.1.0.2:6970",
},
)
assert mod2.is_google_auth_configured()
def test_google_auth_requires_base_url(monkeypatch):
"""is_google_auth_configured() is False when base_url is missing."""
mod = _reload_settings(
monkeypatch,
{
"GOOGLE_CLIENT_ID": "test-id",
"GOOGLE_CLIENT_SECRET": "test-secret",
},
)
monkeypatch.delenv("UNRAID_MCP_BASE_URL", raising=False)
importlib.reload(mod)
assert not mod.is_google_auth_configured()

View File

@@ -17,20 +17,26 @@ class TestGateDestructiveAction:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_non_destructive_action_passes_through(self) -> None: async def test_non_destructive_action_passes_through(self) -> None:
"""Non-destructive actions are never blocked.""" """Non-destructive actions are never blocked."""
await gate_destructive_action(None, "list", DESTRUCTIVE, False, "irrelevant") await gate_destructive_action(
None, "list", DESTRUCTIVE, confirm=False, description="irrelevant"
)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_confirm_true_bypasses_elicitation(self) -> None: async def test_confirm_true_bypasses_elicitation(self) -> None:
"""confirm=True skips elicitation entirely.""" """confirm=True skips elicitation entirely."""
with patch("unraid_mcp.core.guards.elicit_destructive_confirmation") as mock_elicit: with patch("unraid_mcp.core.guards.elicit_destructive_confirmation") as mock_elicit:
await gate_destructive_action(None, "delete", DESTRUCTIVE, True, "desc") await gate_destructive_action(
None, "delete", DESTRUCTIVE, confirm=True, description="desc"
)
mock_elicit.assert_not_called() mock_elicit.assert_not_called()
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_no_ctx_raises_tool_error(self) -> None: async def test_no_ctx_raises_tool_error(self) -> None:
"""ctx=None means elicitation returns False → ToolError.""" """ctx=None means elicitation returns False → ToolError."""
with pytest.raises(ToolError, match="not confirmed"): with pytest.raises(ToolError, match="not confirmed"):
await gate_destructive_action(None, "delete", DESTRUCTIVE, False, "desc") await gate_destructive_action(
None, "delete", DESTRUCTIVE, confirm=False, description="desc"
)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicitation_accepted_does_not_raise(self) -> None: async def test_elicitation_accepted_does_not_raise(self) -> None:
@@ -40,7 +46,9 @@ class TestGateDestructiveAction:
new_callable=AsyncMock, new_callable=AsyncMock,
return_value=True, return_value=True,
): ):
await gate_destructive_action(object(), "delete", DESTRUCTIVE, False, "desc") await gate_destructive_action(
object(), "delete", DESTRUCTIVE, confirm=False, description="desc"
)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicitation_declined_raises_tool_error(self) -> None: async def test_elicitation_declined_raises_tool_error(self) -> None:
@@ -53,7 +61,9 @@ class TestGateDestructiveAction:
) as mock_elicit, ) as mock_elicit,
pytest.raises(ToolError, match="confirm=True"), pytest.raises(ToolError, match="confirm=True"),
): ):
await gate_destructive_action(object(), "delete", DESTRUCTIVE, False, "desc") await gate_destructive_action(
object(), "delete", DESTRUCTIVE, confirm=False, description="desc"
)
mock_elicit.assert_called_once() mock_elicit.assert_called_once()
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -65,7 +75,7 @@ class TestGateDestructiveAction:
return_value=True, return_value=True,
) as mock_elicit: ) as mock_elicit:
await gate_destructive_action( await gate_destructive_action(
object(), "delete", DESTRUCTIVE, False, "Delete everything." object(), "delete", DESTRUCTIVE, confirm=False, description="Delete everything."
) )
_, _, desc = mock_elicit.call_args.args _, _, desc = mock_elicit.call_args.args
assert desc == "Delete everything." assert desc == "Delete everything."
@@ -79,7 +89,9 @@ class TestGateDestructiveAction:
new_callable=AsyncMock, new_callable=AsyncMock,
return_value=True, return_value=True,
) as mock_elicit: ) as mock_elicit:
await gate_destructive_action(object(), "wipe", DESTRUCTIVE, False, descs) await gate_destructive_action(
object(), "wipe", DESTRUCTIVE, confirm=False, description=descs
)
_, _, desc = mock_elicit.call_args.args _, _, desc = mock_elicit.call_args.args
assert desc == "Wipe desc." assert desc == "Wipe desc."
@@ -87,4 +99,6 @@ class TestGateDestructiveAction:
async def test_error_message_contains_action_name(self) -> None: async def test_error_message_contains_action_name(self) -> None:
"""ToolError message includes the action name.""" """ToolError message includes the action name."""
with pytest.raises(ToolError, match="'delete'"): with pytest.raises(ToolError, match="'delete'"):
await gate_destructive_action(None, "delete", DESTRUCTIVE, False, "desc") await gate_destructive_action(
None, "delete", DESTRUCTIVE, confirm=False, description="desc"
)

View File

@@ -404,7 +404,8 @@ async def test_health_setup_declined_message_includes_manual_path() -> None:
real_path_str = str(CREDENTIALS_ENV_PATH) real_path_str = str(CREDENTIALS_ENV_PATH)
mock_path = MagicMock() mock_path = MagicMock()
mock_path.exists.return_value = False mock_path.exists.return_value = False
type(mock_path).__str__ = lambda self: real_path_str # type: ignore[method-assign] # Override __str__ on the instance's mock directly — avoids mutating the shared MagicMock class.
mock_path.__str__ = MagicMock(return_value=real_path_str)
with ( with (
patch("unraid_mcp.config.settings.CREDENTIALS_ENV_PATH", mock_path), patch("unraid_mcp.config.settings.CREDENTIALS_ENV_PATH", mock_path),

View File

@@ -1,6 +1,7 @@
"""Tests for key subactions of the consolidated unraid tool.""" """Tests for key subactions of the consolidated unraid tool."""
from collections.abc import Generator from collections.abc import Callable, Generator
from typing import Any
from unittest.mock import AsyncMock, patch from unittest.mock import AsyncMock, patch
import pytest import pytest
@@ -15,7 +16,7 @@ def _mock_graphql() -> Generator[AsyncMock, None, None]:
yield mock yield mock
def _make_tool(): def _make_tool() -> Callable[..., Any]:
return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid") return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")

View File

@@ -20,20 +20,23 @@ def _make_tool():
class TestRcloneValidation: class TestRcloneValidation:
async def test_delete_requires_confirm(self) -> None: async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
tool_fn = _make_tool() tool_fn = _make_tool()
with pytest.raises(ToolError, match="not confirmed"): with pytest.raises(ToolError, match="not confirmed"):
await tool_fn(action="rclone", subaction="delete_remote", name="gdrive") await tool_fn(action="rclone", subaction="delete_remote", name="gdrive")
_mock_graphql.assert_not_awaited()
async def test_create_requires_fields(self) -> None: async def test_create_requires_fields(self, _mock_graphql: AsyncMock) -> None:
tool_fn = _make_tool() tool_fn = _make_tool()
with pytest.raises(ToolError, match="requires name"): with pytest.raises(ToolError, match="requires name"):
await tool_fn(action="rclone", subaction="create_remote") await tool_fn(action="rclone", subaction="create_remote")
_mock_graphql.assert_not_awaited()
async def test_delete_requires_name(self) -> None: async def test_delete_requires_name(self, _mock_graphql: AsyncMock) -> None:
tool_fn = _make_tool() tool_fn = _make_tool()
with pytest.raises(ToolError, match="name is required"): with pytest.raises(ToolError, match="name is required"):
await tool_fn(action="rclone", subaction="delete_remote", confirm=True) await tool_fn(action="rclone", subaction="delete_remote", confirm=True)
_mock_graphql.assert_not_awaited()
class TestRcloneActions: class TestRcloneActions:

View File

@@ -64,6 +64,8 @@ class TestLiveResourcesUseManagerCache:
with patch("unraid_mcp.subscriptions.resources.subscription_manager") as mock_mgr: with patch("unraid_mcp.subscriptions.resources.subscription_manager") as mock_mgr:
mock_mgr.get_resource_data = AsyncMock(return_value=None) mock_mgr.get_resource_data = AsyncMock(return_value=None)
mock_mgr.last_error = {action: "WebSocket auth failed"} mock_mgr.last_error = {action: "WebSocket auth failed"}
mock_mgr.connection_states = {action: "auth_failed"}
mock_mgr.auto_start_enabled = True
mcp = _make_resources() mcp = _make_resources()
# Accessing FastMCP internals intentionally for unit test isolation. # Accessing FastMCP internals intentionally for unit test isolation.
# This may break on FastMCP upgrades — consider a make_resource_fn() helper if it does. # This may break on FastMCP upgrades — consider a make_resource_fn() helper if it does.

View File

@@ -1,5 +1,7 @@
import os
import stat
from pathlib import Path from pathlib import Path
from unittest.mock import patch from unittest.mock import AsyncMock, MagicMock, patch
import pytest import pytest
@@ -100,8 +102,6 @@ def test_run_server_does_not_exit_when_creds_missing(monkeypatch):
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_and_configure_writes_env_file(tmp_path): async def test_elicit_and_configure_writes_env_file(tmp_path):
"""elicit_and_configure writes a .env file and calls apply_runtime_config.""" """elicit_and_configure writes a .env file and calls apply_runtime_config."""
from unittest.mock import AsyncMock, MagicMock, patch
from unraid_mcp.core.setup import elicit_and_configure from unraid_mcp.core.setup import elicit_and_configure
mock_ctx = MagicMock() mock_ctx = MagicMock()
@@ -133,7 +133,6 @@ async def test_elicit_and_configure_writes_env_file(tmp_path):
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_and_configure_returns_false_on_decline(): async def test_elicit_and_configure_returns_false_on_decline():
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_and_configure from unraid_mcp.core.setup import elicit_and_configure
@@ -148,7 +147,6 @@ async def test_elicit_and_configure_returns_false_on_decline():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_and_configure_returns_false_on_cancel(): async def test_elicit_and_configure_returns_false_on_cancel():
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_and_configure from unraid_mcp.core.setup import elicit_and_configure
@@ -181,9 +179,6 @@ async def test_make_graphql_request_raises_sentinel_when_unconfigured():
settings_mod.UNRAID_API_KEY = original_key settings_mod.UNRAID_API_KEY = original_key
import os # noqa: E402 — needed for reload-based tests below
def test_credentials_dir_defaults_to_home_unraid_mcp(): def test_credentials_dir_defaults_to_home_unraid_mcp():
"""CREDENTIALS_DIR defaults to ~/.unraid-mcp when env var is not set.""" """CREDENTIALS_DIR defaults to ~/.unraid-mcp when env var is not set."""
import importlib import importlib
@@ -223,9 +218,6 @@ def test_credentials_env_path_is_dot_env_inside_credentials_dir():
assert s.CREDENTIALS_ENV_PATH == s.CREDENTIALS_DIR / ".env" assert s.CREDENTIALS_ENV_PATH == s.CREDENTIALS_DIR / ".env"
import stat # noqa: E402
def test_write_env_creates_credentials_dir_with_700_permissions(tmp_path): def test_write_env_creates_credentials_dir_with_700_permissions(tmp_path):
"""_write_env creates CREDENTIALS_DIR with mode 700 (owner-only).""" """_write_env creates CREDENTIALS_DIR with mode 700 (owner-only)."""
from unraid_mcp.core.setup import _write_env from unraid_mcp.core.setup import _write_env
@@ -342,7 +334,6 @@ def test_write_env_updates_existing_credentials_in_place(tmp_path):
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_and_configure_returns_false_when_client_not_supported(): async def test_elicit_and_configure_returns_false_when_client_not_supported():
"""elicit_and_configure returns False when client raises NotImplementedError.""" """elicit_and_configure returns False when client raises NotImplementedError."""
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_and_configure from unraid_mcp.core.setup import elicit_and_configure
@@ -404,7 +395,6 @@ async def test_elicit_reset_confirmation_returns_false_when_ctx_none():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_reset_confirmation_returns_true_when_user_confirms(): async def test_elicit_reset_confirmation_returns_true_when_user_confirms():
"""Returns True when the user accepts and answers True.""" """Returns True when the user accepts and answers True."""
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_reset_confirmation from unraid_mcp.core.setup import elicit_reset_confirmation
@@ -421,7 +411,6 @@ async def test_elicit_reset_confirmation_returns_true_when_user_confirms():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_reset_confirmation_returns_false_when_user_answers_false(): async def test_elicit_reset_confirmation_returns_false_when_user_answers_false():
"""Returns False when the user accepts but answers False (does not want to reset).""" """Returns False when the user accepts but answers False (does not want to reset)."""
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_reset_confirmation from unraid_mcp.core.setup import elicit_reset_confirmation
@@ -438,7 +427,6 @@ async def test_elicit_reset_confirmation_returns_false_when_user_answers_false()
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_reset_confirmation_returns_false_when_declined(): async def test_elicit_reset_confirmation_returns_false_when_declined():
"""Returns False when the user declines via action (dismisses the prompt).""" """Returns False when the user declines via action (dismisses the prompt)."""
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_reset_confirmation from unraid_mcp.core.setup import elicit_reset_confirmation
@@ -454,7 +442,6 @@ async def test_elicit_reset_confirmation_returns_false_when_declined():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_reset_confirmation_returns_false_when_cancelled(): async def test_elicit_reset_confirmation_returns_false_when_cancelled():
"""Returns False when the user cancels the prompt.""" """Returns False when the user cancels the prompt."""
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_reset_confirmation from unraid_mcp.core.setup import elicit_reset_confirmation
@@ -468,13 +455,13 @@ async def test_elicit_reset_confirmation_returns_false_when_cancelled():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_reset_confirmation_returns_true_when_not_implemented(): async def test_elicit_reset_confirmation_returns_false_when_not_implemented():
"""Returns True (proceed with reset) when the MCP client does not support elicitation. """Returns False (decline reset) when the MCP client does not support elicitation.
Non-interactive clients (stdio, CI) must not be permanently blocked from Auto-approving a destructive credential reset on non-interactive clients would
reconfiguring credentials just because they can't ask the user a yes/no question. silently overwrite working credentials. Callers must use a client that supports
elicitation or configure credentials directly via the .env file.
""" """
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_reset_confirmation from unraid_mcp.core.setup import elicit_reset_confirmation
@@ -482,13 +469,12 @@ async def test_elicit_reset_confirmation_returns_true_when_not_implemented():
mock_ctx.elicit = AsyncMock(side_effect=NotImplementedError("elicitation not supported")) mock_ctx.elicit = AsyncMock(side_effect=NotImplementedError("elicitation not supported"))
result = await elicit_reset_confirmation(mock_ctx, "https://example.com") result = await elicit_reset_confirmation(mock_ctx, "https://example.com")
assert result is True assert result is False
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_elicit_reset_confirmation_includes_current_url_in_prompt(): async def test_elicit_reset_confirmation_includes_current_url_in_prompt():
"""The elicitation message includes the current URL so the user knows what they're replacing.""" """The elicitation message includes the current URL so the user knows what they're replacing."""
from unittest.mock import AsyncMock, MagicMock
from unraid_mcp.core.setup import elicit_reset_confirmation from unraid_mcp.core.setup import elicit_reset_confirmation
@@ -507,8 +493,6 @@ async def test_elicit_reset_confirmation_includes_current_url_in_prompt():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_credentials_not_configured_surfaces_as_tool_error_with_path(): async def test_credentials_not_configured_surfaces_as_tool_error_with_path():
"""CredentialsNotConfiguredError from a tool becomes ToolError with the credentials path.""" """CredentialsNotConfiguredError from a tool becomes ToolError with the credentials path."""
from unittest.mock import AsyncMock, patch
from tests.conftest import make_tool_fn from tests.conftest import make_tool_fn
from unraid_mcp.config.settings import CREDENTIALS_ENV_PATH from unraid_mcp.config.settings import CREDENTIALS_ENV_PATH
from unraid_mcp.core.exceptions import CredentialsNotConfiguredError, ToolError from unraid_mcp.core.exceptions import CredentialsNotConfiguredError, ToolError

View File

@@ -56,11 +56,13 @@ class TestStorageValidation:
tool_fn = _make_tool() tool_fn = _make_tool()
with pytest.raises(ToolError, match="log_path"): with pytest.raises(ToolError, match="log_path"):
await tool_fn(action="disk", subaction="logs") await tool_fn(action="disk", subaction="logs")
_mock_graphql.assert_not_awaited()
async def test_logs_rejects_invalid_path(self, _mock_graphql: AsyncMock) -> None: async def test_logs_rejects_invalid_path(self, _mock_graphql: AsyncMock) -> None:
tool_fn = _make_tool() tool_fn = _make_tool()
with pytest.raises(ToolError, match="log_path must start with"): with pytest.raises(ToolError, match="log_path must start with"):
await tool_fn(action="disk", subaction="logs", log_path="/etc/shadow") await tool_fn(action="disk", subaction="logs", log_path="/etc/shadow")
_mock_graphql.assert_not_awaited()
async def test_logs_rejects_path_traversal(self, _mock_graphql: AsyncMock) -> None: async def test_logs_rejects_path_traversal(self, _mock_graphql: AsyncMock) -> None:
tool_fn = _make_tool() tool_fn = _make_tool()
@@ -70,6 +72,7 @@ class TestStorageValidation:
# Traversal via .. — detected by early .. check # Traversal via .. — detected by early .. check
with pytest.raises(ToolError, match="log_path"): with pytest.raises(ToolError, match="log_path"):
await tool_fn(action="disk", subaction="logs", log_path="/var/log/../etc/passwd") await tool_fn(action="disk", subaction="logs", log_path="/var/log/../etc/passwd")
_mock_graphql.assert_not_awaited()
async def test_logs_allows_valid_paths(self, _mock_graphql: AsyncMock) -> None: async def test_logs_allows_valid_paths(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {"logFile": {"path": "/var/log/syslog", "content": "ok"}} _mock_graphql.return_value = {"logFile": {"path": "/var/log/syslog", "content": "ok"}}
@@ -83,11 +86,13 @@ class TestStorageValidation:
await tool_fn( await tool_fn(
action="disk", subaction="logs", log_path="/var/log/syslog", tail_lines=10_001 action="disk", subaction="logs", log_path="/var/log/syslog", tail_lines=10_001
) )
_mock_graphql.assert_not_awaited()
async def test_logs_tail_lines_zero_rejected(self, _mock_graphql: AsyncMock) -> None: async def test_logs_tail_lines_zero_rejected(self, _mock_graphql: AsyncMock) -> None:
tool_fn = _make_tool() tool_fn = _make_tool()
with pytest.raises(ToolError, match="tail_lines must be between"): with pytest.raises(ToolError, match="tail_lines must be between"):
await tool_fn(action="disk", subaction="logs", log_path="/var/log/syslog", tail_lines=0) await tool_fn(action="disk", subaction="logs", log_path="/var/log/syslog", tail_lines=0)
_mock_graphql.assert_not_awaited()
async def test_logs_tail_lines_at_max_accepted(self, _mock_graphql: AsyncMock) -> None: async def test_logs_tail_lines_at_max_accepted(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {"logFile": {"path": "/var/log/syslog", "content": "ok"}} _mock_graphql.return_value = {"logFile": {"path": "/var/log/syslog", "content": "ok"}}

View File

@@ -5,6 +5,7 @@ and provides all configuration constants used throughout the application.
""" """
import os import os
import sys
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
@@ -51,13 +52,9 @@ def _parse_port(env_var: str, default: int) -> int:
try: try:
port = int(raw) port = int(raw)
except ValueError: except ValueError:
import sys
print(f"FATAL: {env_var}={raw!r} is not a valid integer port number", file=sys.stderr) print(f"FATAL: {env_var}={raw!r} is not a valid integer port number", file=sys.stderr)
sys.exit(1) sys.exit(1)
if not (1 <= port <= 65535): if not (1 <= port <= 65535):
import sys
print(f"FATAL: {env_var}={port} outside valid port range 1-65535", file=sys.stderr) print(f"FATAL: {env_var}={port} outside valid port range 1-65535", file=sys.stderr)
sys.exit(1) sys.exit(1)
return port return port
@@ -65,7 +62,7 @@ def _parse_port(env_var: str, default: int) -> int:
UNRAID_MCP_PORT = _parse_port("UNRAID_MCP_PORT", 6970) UNRAID_MCP_PORT = _parse_port("UNRAID_MCP_PORT", 6970)
UNRAID_MCP_HOST = os.getenv("UNRAID_MCP_HOST", "0.0.0.0") # noqa: S104 — intentional for Docker UNRAID_MCP_HOST = os.getenv("UNRAID_MCP_HOST", "0.0.0.0") # noqa: S104 — intentional for Docker
UNRAID_MCP_TRANSPORT = os.getenv("UNRAID_MCP_TRANSPORT", "streamable-http").lower() UNRAID_MCP_TRANSPORT = os.getenv("UNRAID_MCP_TRANSPORT", "stdio").lower()
# SSL Configuration # SSL Configuration
raw_verify_ssl = os.getenv("UNRAID_VERIFY_SSL", "true").lower() raw_verify_ssl = os.getenv("UNRAID_VERIFY_SSL", "true").lower()
@@ -76,41 +73,6 @@ elif raw_verify_ssl in ["true", "1", "yes"]:
else: # Path to CA bundle else: # Path to CA bundle
UNRAID_VERIFY_SSL = raw_verify_ssl UNRAID_VERIFY_SSL = raw_verify_ssl
# Google OAuth Configuration (Optional)
# -------------------------------------
# When set, the MCP HTTP server requires Google login before tool calls.
# UNRAID_MCP_BASE_URL must match the public URL clients use to reach this server.
# Google Cloud Console → Credentials → Authorized redirect URIs:
# Add: <UNRAID_MCP_BASE_URL>/auth/callback
GOOGLE_CLIENT_ID = os.getenv("GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.getenv("GOOGLE_CLIENT_SECRET", "")
UNRAID_MCP_BASE_URL = os.getenv("UNRAID_MCP_BASE_URL", "")
# JWT signing key for FastMCP OAuth tokens.
# MUST be set to a stable secret so tokens survive server restarts.
# Generate once: python3 -c "import secrets; print(secrets.token_hex(32))"
# Never change this value — all existing tokens will be invalidated.
UNRAID_MCP_JWT_SIGNING_KEY = os.getenv("UNRAID_MCP_JWT_SIGNING_KEY", "")
def is_google_auth_configured() -> bool:
"""Return True when all required Google OAuth vars are present."""
return bool(GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET and UNRAID_MCP_BASE_URL)
# API Key Authentication (Optional)
# ----------------------------------
# A static bearer token clients can use instead of (or alongside) Google OAuth.
# Can be set to the same value as UNRAID_API_KEY for simplicity, or a separate
# dedicated secret for MCP access.
UNRAID_MCP_API_KEY = os.getenv("UNRAID_MCP_API_KEY", "")
def is_api_key_auth_configured() -> bool:
"""Return True when UNRAID_MCP_API_KEY is set."""
return bool(UNRAID_MCP_API_KEY)
# Logging Configuration # Logging Configuration
LOG_LEVEL_STR = os.getenv("UNRAID_MCP_LOG_LEVEL", "INFO").upper() LOG_LEVEL_STR = os.getenv("UNRAID_MCP_LOG_LEVEL", "INFO").upper()
LOG_FILE_NAME = os.getenv("UNRAID_MCP_LOG_FILE", "unraid-mcp.log") LOG_FILE_NAME = os.getenv("UNRAID_MCP_LOG_FILE", "unraid-mcp.log")
@@ -190,10 +152,6 @@ def get_config_summary() -> dict[str, Any]:
"log_file": str(LOG_FILE_PATH), "log_file": str(LOG_FILE_PATH),
"config_valid": is_valid, "config_valid": is_valid,
"missing_config": missing if not is_valid else None, "missing_config": missing if not is_valid else None,
"google_auth_enabled": is_google_auth_configured(),
"google_auth_base_url": UNRAID_MCP_BASE_URL if is_google_auth_configured() else None,
"jwt_signing_key_configured": bool(UNRAID_MCP_JWT_SIGNING_KEY),
"api_key_auth_enabled": is_api_key_auth_configured(),
} }

View File

@@ -52,13 +52,16 @@ async def elicit_reset_confirmation(ctx: Context | None, current_url: str) -> bo
response_type=bool, response_type=bool,
) )
except NotImplementedError: except NotImplementedError:
# Client doesn't support elicitation — treat as "proceed with reset" so # Client doesn't support elicitation — return False (decline the reset).
# non-interactive clients (stdio, CI) are not permanently blocked from # Auto-approving a destructive credential reset on non-interactive clients
# reconfiguring credentials. # could silently overwrite working credentials; callers must use a client
# that supports elicitation or configure credentials directly in the .env file.
logger.warning( logger.warning(
"MCP client does not support elicitation for reset confirmation — proceeding with reset." "MCP client does not support elicitation for reset confirmation — declining reset. "
"To reconfigure credentials, edit %s directly.",
CREDENTIALS_ENV_PATH,
) )
return True return False
if result.action != "accept": if result.action != "accept":
logger.info("Credential reset declined by user (%s).", result.action) logger.info("Credential reset declined by user (%s).", result.action)

View File

@@ -4,20 +4,16 @@ This is the main server implementation using the modular architecture with
separate modules for configuration, core functionality, subscriptions, and tools. separate modules for configuration, core functionality, subscriptions, and tools.
""" """
import hmac
import sys import sys
from typing import Any
from fastmcp import FastMCP from fastmcp import FastMCP
from fastmcp.server.auth import AccessToken, MultiAuth, TokenVerifier
from fastmcp.server.auth.providers.google import GoogleProvider
from fastmcp.server.middleware.caching import CallToolSettings, ResponseCachingMiddleware from fastmcp.server.middleware.caching import CallToolSettings, ResponseCachingMiddleware
from fastmcp.server.middleware.error_handling import ErrorHandlingMiddleware from fastmcp.server.middleware.error_handling import ErrorHandlingMiddleware
from fastmcp.server.middleware.logging import LoggingMiddleware from fastmcp.server.middleware.logging import LoggingMiddleware
from fastmcp.server.middleware.rate_limiting import SlidingWindowRateLimitingMiddleware from fastmcp.server.middleware.rate_limiting import SlidingWindowRateLimitingMiddleware
from fastmcp.server.middleware.response_limiting import ResponseLimitingMiddleware from fastmcp.server.middleware.response_limiting import ResponseLimitingMiddleware
from .config.logging import logger from .config.logging import log_configuration_status, logger
from .config.settings import ( from .config.settings import (
LOG_LEVEL_STR, LOG_LEVEL_STR,
UNRAID_MCP_HOST, UNRAID_MCP_HOST,
@@ -49,10 +45,14 @@ _error_middleware = ErrorHandlingMiddleware(
include_traceback=LOG_LEVEL_STR == "DEBUG", include_traceback=LOG_LEVEL_STR == "DEBUG",
) )
# 3. Unraid API rate limit: 100 requests per 10 seconds. # 3. Rate limiting: 540 requests per 60-second sliding window.
# SlidingWindowRateLimitingMiddleware only accepts window_minutes (int), so express # SlidingWindowRateLimitingMiddleware only supports window_minutes (int), so the
# the 10-second budget as a 1-minute equivalent: 540 req/60 s to stay comfortably # upstream Unraid "100 req/10 s" burst limit cannot be enforced exactly here.
# under the 600 req/min ceiling. # 540 req/min is a conservative 1-minute equivalent that prevents sustained
# overload while staying well under the 600 req/min ceiling.
# Note: this does NOT cap bursts within a 10 s window; a client can still send
# up to 540 requests in the first 10 s of a window. Add a sub-minute rate limiter
# in front of this server (e.g. nginx limit_req) if tighter burst control is needed.
_rate_limiter = SlidingWindowRateLimitingMiddleware(max_requests=540, window_minutes=1) _rate_limiter = SlidingWindowRateLimitingMiddleware(max_requests=540, window_minutes=1)
# 4. Cap tool responses at 512 KB to protect the client context window. # 4. Cap tool responses at 512 KB to protect the client context window.
@@ -80,117 +80,13 @@ _cache_middleware = ResponseCachingMiddleware(
) )
class ApiKeyVerifier(TokenVerifier): # Initialize FastMCP instance — no built-in auth.
"""Bearer token verifier that validates against a static API key. # Authentication is delegated to an external OAuth gateway (nginx, Caddy,
# Authelia, Authentik, etc.) placed in front of this server.
Clients present the key as a standard OAuth bearer token:
Authorization: Bearer <UNRAID_MCP_API_KEY>
This allows machine-to-machine access (e.g. CI, scripts, other agents)
without going through the Google OAuth browser flow.
"""
def __init__(self, api_key: str) -> None:
super().__init__()
self._api_key = api_key
async def verify_token(self, token: str) -> AccessToken | None:
if self._api_key and hmac.compare_digest(token.encode(), self._api_key.encode()):
return AccessToken(
token=token,
client_id="api-key-client",
scopes=[],
)
return None
def _build_google_auth() -> "GoogleProvider | None":
"""Build GoogleProvider when OAuth env vars are configured, else return None.
Returns None (no auth) when GOOGLE_CLIENT_ID or GOOGLE_CLIENT_SECRET are absent,
preserving backward compatibility for existing unprotected setups.
"""
from .config.settings import (
GOOGLE_CLIENT_ID,
GOOGLE_CLIENT_SECRET,
UNRAID_MCP_BASE_URL,
UNRAID_MCP_JWT_SIGNING_KEY,
UNRAID_MCP_TRANSPORT,
is_google_auth_configured,
)
if not is_google_auth_configured():
return None
if UNRAID_MCP_TRANSPORT == "stdio":
logger.warning(
"Google OAuth is configured but UNRAID_MCP_TRANSPORT=stdio. "
"OAuth requires HTTP transport (streamable-http or sse). "
"Auth will be applied but may not work as expected."
)
kwargs: dict[str, Any] = {
"client_id": GOOGLE_CLIENT_ID,
"client_secret": GOOGLE_CLIENT_SECRET,
"base_url": UNRAID_MCP_BASE_URL,
# Prefer short-lived access tokens without refresh-token rotation churn.
# This reduces reconnect instability in MCP clients that re-auth frequently.
"extra_authorize_params": {"access_type": "online", "prompt": "consent"},
# Skip the FastMCP consent page — goes directly to Google.
# The consent page has a CSRF double-load race: two concurrent GET requests
# each regenerate the CSRF token, the second overwrites the first in the
# transaction store, and the POST fails with "Invalid or expired consent token".
"require_authorization_consent": False,
}
if UNRAID_MCP_JWT_SIGNING_KEY:
kwargs["jwt_signing_key"] = UNRAID_MCP_JWT_SIGNING_KEY
else:
logger.warning(
"UNRAID_MCP_JWT_SIGNING_KEY is not set. FastMCP will derive a key automatically, "
"but tokens may be invalidated on server restart. "
"Set UNRAID_MCP_JWT_SIGNING_KEY to a stable secret."
)
logger.info(
f"Google OAuth enabled — base_url={UNRAID_MCP_BASE_URL}, "
f"redirect_uri={UNRAID_MCP_BASE_URL}/auth/callback"
)
return GoogleProvider(**kwargs)
def _build_auth() -> "GoogleProvider | ApiKeyVerifier | MultiAuth | None":
"""Build the active auth stack from environment configuration.
Returns:
- MultiAuth(server=GoogleProvider, verifiers=[ApiKeyVerifier])
when both GOOGLE_CLIENT_ID and UNRAID_MCP_API_KEY are set.
- GoogleProvider alone when only Google OAuth vars are set.
- ApiKeyVerifier alone when only UNRAID_MCP_API_KEY is set.
- None when no auth vars are configured (open server).
"""
from .config.settings import UNRAID_MCP_API_KEY, is_api_key_auth_configured
google = _build_google_auth()
api_key = ApiKeyVerifier(UNRAID_MCP_API_KEY) if is_api_key_auth_configured() else None
if google and api_key:
logger.info("Auth: Google OAuth + API key both enabled (MultiAuth)")
return MultiAuth(server=google, verifiers=[api_key])
if api_key:
logger.info("Auth: API key authentication enabled")
return api_key
return google # GoogleProvider or None
# Build auth stack — GoogleProvider, ApiKeyVerifier, MultiAuth, or None.
_auth = _build_auth()
# Initialize FastMCP instance
mcp = FastMCP( mcp = FastMCP(
name="Unraid MCP Server", name="Unraid MCP Server",
instructions="Provides tools to interact with an Unraid server's GraphQL API.", instructions="Provides tools to interact with an Unraid server's GraphQL API.",
version=VERSION, version=VERSION,
auth=_auth,
middleware=[ middleware=[
_logging_middleware, _logging_middleware,
_error_middleware, _error_middleware,
@@ -238,9 +134,6 @@ def run_server() -> None:
"Server will prompt for credentials on first tool call via elicitation." "Server will prompt for credentials on first tool call via elicitation."
) )
# Log configuration (delegated to shared function)
from .config.logging import log_configuration_status
log_configuration_status(logger) log_configuration_status(logger)
if UNRAID_VERIFY_SSL is False: if UNRAID_VERIFY_SSL is False:
@@ -250,25 +143,11 @@ def run_server() -> None:
"Only use this in trusted networks or for development." "Only use this in trusted networks or for development."
) )
if _auth is not None: if UNRAID_MCP_TRANSPORT in ("streamable-http", "sse"):
from .config.settings import is_google_auth_configured
if is_google_auth_configured():
from .config.settings import UNRAID_MCP_BASE_URL
logger.info(
"Google OAuth ENABLED — clients must authenticate before calling tools. "
f"Redirect URI: {UNRAID_MCP_BASE_URL}/auth/callback"
)
else:
logger.info(
"API key authentication ENABLED — present UNRAID_MCP_API_KEY as bearer token."
)
else:
logger.warning( logger.warning(
"No authentication configured — MCP server is open to all clients on the network. " "⚠️ NO AUTHENTICATION — HTTP server is open to all clients on the network. "
"Set GOOGLE_CLIENT_ID + GOOGLE_CLIENT_SECRET + UNRAID_MCP_BASE_URL to enable Google OAuth, " "Protect this server with an external OAuth gateway (nginx, Caddy, Authelia, Authentik) "
"or set UNRAID_MCP_API_KEY to enable bearer token authentication." "or restrict access at the network layer (firewall, VPN, Tailscale)."
) )
logger.info( logger.info(
@@ -276,13 +155,17 @@ def run_server() -> None:
) )
try: try:
if UNRAID_MCP_TRANSPORT == "streamable-http": if UNRAID_MCP_TRANSPORT in ("streamable-http", "sse"):
if UNRAID_MCP_TRANSPORT == "sse":
logger.warning(
"SSE transport is deprecated. Consider switching to 'streamable-http'."
)
mcp.run( mcp.run(
transport="streamable-http", host=UNRAID_MCP_HOST, port=UNRAID_MCP_PORT, path="/mcp" transport=UNRAID_MCP_TRANSPORT,
host=UNRAID_MCP_HOST,
port=UNRAID_MCP_PORT,
path="/mcp",
) )
elif UNRAID_MCP_TRANSPORT == "sse":
logger.warning("SSE transport is deprecated. Consider switching to 'streamable-http'.")
mcp.run(transport="sse", host=UNRAID_MCP_HOST, port=UNRAID_MCP_PORT, path="/mcp")
elif UNRAID_MCP_TRANSPORT == "stdio": elif UNRAID_MCP_TRANSPORT == "stdio":
mcp.run() mcp.run()
else: else:

View File

@@ -7,7 +7,8 @@ and the MCP protocol, providing fallback queries when subscription data is unava
import asyncio import asyncio
import json import json
import os import os
from typing import Final from collections.abc import Callable, Coroutine
from typing import Any, Final
import anyio import anyio
from fastmcp import FastMCP from fastmcp import FastMCP
@@ -22,6 +23,8 @@ from .snapshot import subscribe_once
_subscriptions_started = False _subscriptions_started = False
_startup_lock: Final[asyncio.Lock] = asyncio.Lock() _startup_lock: Final[asyncio.Lock] = asyncio.Lock()
_terminal_states = frozenset({"failed", "auth_failed", "max_retries_exceeded"})
async def ensure_subscriptions_started() -> None: async def ensure_subscriptions_started() -> None:
"""Ensure subscriptions are started, called from async context.""" """Ensure subscriptions are started, called from async context."""
@@ -104,15 +107,17 @@ def register_subscription_resources(mcp: FastMCP) -> None:
} }
) )
def _make_resource_fn(action: str): def _make_resource_fn(action: str) -> Callable[[], Coroutine[Any, Any, str]]:
async def _live_resource() -> str: async def _live_resource() -> str:
await ensure_subscriptions_started() await ensure_subscriptions_started()
data = await subscription_manager.get_resource_data(action) data = await subscription_manager.get_resource_data(action)
if data is not None: if data is not None:
return json.dumps(data, indent=2) return json.dumps(data, indent=2)
# Surface permanent errors instead of reporting "connecting" indefinitely # Surface permanent errors only when the connection is in a terminal failure
# state — if the subscription has since reconnected, ignore the stale error.
last_error = subscription_manager.last_error.get(action) last_error = subscription_manager.last_error.get(action)
if last_error: conn_state = subscription_manager.connection_states.get(action, "")
if last_error and conn_state in _terminal_states:
return json.dumps( return json.dumps(
{ {
"status": "error", "status": "error",

View File

@@ -792,15 +792,22 @@ def _find_container(
if strict: if strict:
return None return None
id_lower = identifier.lower() id_lower = identifier.lower()
for c in containers: # Collect prefix matches first, then fall back to substring matches.
for name in c.get("names", []): prefix_matches = [
if name.lower().startswith(id_lower): c for c in containers if any(n.lower().startswith(id_lower) for n in c.get("names", []))
return c ]
for c in containers: candidates = prefix_matches or [
for name in c.get("names", []): c for c in containers if any(id_lower in n.lower() for n in c.get("names", []))
if id_lower in name.lower(): ]
return c if not candidates:
return None return None
if len(candidates) == 1:
return candidates[0]
names = [n for c in candidates for n in c.get("names", [])]
raise ToolError(
f"Container identifier '{identifier}' is ambiguous — matches: {', '.join(names[:10])}. "
"Use a more specific name or the full container ID."
)
async def _resolve_container_id(container_id: str, *, strict: bool = False) -> str: async def _resolve_container_id(container_id: str, *, strict: bool = False) -> str:
@@ -1258,6 +1265,8 @@ async def _handle_key(
input_data["name"] = name input_data["name"] = name
if roles is not None: if roles is not None:
input_data["roles"] = roles input_data["roles"] = roles
if permissions is not None:
input_data["permissions"] = permissions
data = await make_graphql_request(_KEY_MUTATIONS["update"], {"input": input_data}) data = await make_graphql_request(_KEY_MUTATIONS["update"], {"input": input_data})
updated_key = (data.get("apiKey") or {}).get("update") updated_key = (data.get("apiKey") or {}).get("update")
if not updated_key: if not updated_key:
@@ -1277,7 +1286,7 @@ async def _handle_key(
if subaction in ("add_role", "remove_role"): if subaction in ("add_role", "remove_role"):
if not key_id: if not key_id:
raise ToolError(f"key_id is required for key/{subaction}") raise ToolError(f"key_id is required for key/{subaction}")
if not roles or len(roles) == 0: if not roles:
raise ToolError( raise ToolError(
f"roles is required for key/{subaction} (pass as roles=['ROLE_NAME'])" f"roles is required for key/{subaction} (pass as roles=['ROLE_NAME'])"
) )