12 Commits

Author SHA1 Message Date
84675815b4 release: version 0.0.6 🚀
All checks were successful
Build Docker image / Create Release (push) Successful in 6s
Build Docker image / deploy (push) Successful in 57s
2026-02-28 17:08:27 +01:00
37013fbd81 fix: even more schema apatations 2026-02-28 17:08:23 +01:00
202354bbc1 release: version 0.0.5 🚀
All checks were successful
Build Docker image / Create Release (push) Successful in 7s
Build Docker image / deploy (push) Successful in 1m7s
2026-02-28 16:49:10 +01:00
0d876564cc fix: runtime error due to broken import 2026-02-28 16:49:06 +01:00
df2db88e0e release: version 0.0.4 🚀
All checks were successful
Build Docker image / Create Release (push) Successful in 6s
Build Docker image / deploy (push) Successful in 54s
2026-02-28 16:45:22 +01:00
88983c6736 fix: even more changes to accommodate older GraphQL schema 2026-02-28 16:45:19 +01:00
f89ed7275b release: version 0.0.3 🚀
All checks were successful
Build Docker image / Create Release (push) Successful in 6s
Build Docker image / deploy (push) Successful in 1m38s
2026-02-28 15:51:27 +01:00
3d23d3c5b4 fix: adapt for supported GraphQL schema on 6.12.13 2026-02-28 15:51:14 +01:00
3acdfa3b2b release: version 0.0.2 🚀
All checks were successful
Build Docker image / Create Release (push) Successful in 9s
Build Docker image / deploy (push) Successful in 1m1s
2026-02-21 14:34:25 +01:00
da669a49cb fix: set scripts as executable, refs NOISSUE 2026-02-21 14:34:03 +01:00
d7f7ac7a6f fix: initial test release 2026-02-21 14:02:18 +01:00
e023d89308 ci: add Homelab CI framework 2026-02-21 13:51:09 +01:00
35 changed files with 838 additions and 321 deletions

View File

@@ -0,0 +1,50 @@
#!/usr/bin/env sh
# commit-msg hook: reject the commit when its message does not follow the
# Conventional Commits format (with an optional issue reference suffix).
echo "Running commit message checks..."
. "$(dirname -- "$0")/../../.gitea/conventional_commits/hooks/text-styles.sh"
# Get the commit message. Git invokes commit-msg hooks with the path to the
# message file as $1 (usually .git/COMMIT_EDITMSG, but e.g. MERGE_MSG for
# merges, and a different path in linked worktrees). Fall back to the old
# hardcoded location so the script can still be run by hand.
commit="$(cat "${1:-.git/COMMIT_EDITMSG}")"
# Define the conventional commit regex
regex='^((build|chore|ci|docs|feat|fix|perf|refactor|revert|style|test)(\(.+\))?(!?):\s([a-zA-Z0-9-_!\&\.\%\(\)\=\w\s]+)(\s?(,?\s?)((ref(s?):?\s?)(([A-Z0-9]+\-[0-9]+)|(NOISSUE)))?))|(release: .*)$'
# Check if the commit message matches the conventional commit format
if ! echo "$commit" | grep -Pq "$regex"
then
  echo
  colorPrint red "❌ Failed to create commit. Your commit message does not follow the conventional commit format."
  colorPrint red "Please use the following format: $(colorPrint brightRed 'type(scope)?: description')"
  colorPrint red "Available types are listed below. Scope is optional. Use ! after type to indicate breaking change."
  echo
  colorPrint brightWhite "Quick examples:"
  echo "feat: add email notifications on new direct messages refs ABC-1213"
  echo "feat(shopping cart): add the amazing button ref: DEFG-23"
  echo "feat!: remove ticket list endpoint ref DADA-109"
  echo "fix(api): handle empty message in request body refs: MINE-82"
  echo "chore(deps): bump some-package-name to version 2.0.0 refs ASDF-12"
  echo
  colorPrint brightWhite "Commit types:"
  colorPrint brightCyan "build: $(colorPrint white "Changes that affect the build system or external dependencies (example scopes: gulp, broccoli, npm)" -n)"
  colorPrint brightCyan "ci: $(colorPrint white "Changes to CI configuration files and scripts (example scopes: Travis, Circle, BrowserStack, SauceLabs)" -n)"
  colorPrint brightCyan "chore: $(colorPrint white "Changes which doesn't change source code or tests e.g. changes to the build process, auxiliary tools, libraries" -n)"
  colorPrint brightCyan "docs: $(colorPrint white "Documentation only changes" -n)"
  colorPrint brightCyan "feat: $(colorPrint white "A new feature" -n)"
  colorPrint brightCyan "fix: $(colorPrint white "A bug fix" -n)"
  colorPrint brightCyan "perf: $(colorPrint white "A code change that improves performance" -n)"
  colorPrint brightCyan "refactor: $(colorPrint white "A code change that neither fixes a bug nor adds a feature" -n)"
  colorPrint brightCyan "revert: $(colorPrint white "Revert a change previously introduced" -n)"
  colorPrint brightCyan "test: $(colorPrint white "Adding missing tests or correcting existing tests" -n)"
  echo
  colorPrint brightWhite "Reminders"
  echo "Put newline before extended commit body"
  echo "More details at $(underline "http://www.conventionalcommits.org")"
  echo
  echo "The commit message you attempted was: $commit"
  echo
  echo "The exact RegEx applied to this message was:"
  colorPrint brightCyan "$regex"
  echo
  exit 1
fi

View File

@@ -0,0 +1,106 @@
#!/bin/bash
# Rules for generating semantic versioning
# major: breaking change
# minor: feat, style
# patch: build, fix, perf, refactor, revert
# Optional first argument: any non-empty value keeps the temporary
# messages file around after the run (useful for debugging).
PREVENT_REMOVE_FILE=$1
# Directory holding the temporary dump of commit subject lines.
TEMP_FILE_PATH=.gitea/conventional_commits/tmp
# Most recent tag; --always falls back to a commit hash when no tag exists,
# which the semver PATTERN check below then rejects.
LAST_TAG=$(git describe --tags --abbrev=0 --always)
# Debug output; the '#' delimiters make stray whitespace visible.
echo "Last tag: #$LAST_TAG#"
# Pattern deciding whether LAST_TAG is a real MAJOR.MINOR.PATCH release tag.
PATTERN="^[0-9]+\.[0-9]+\.[0-9]+$"
increment_version() {
  # Bump a semantic version string.
  #   $1 - current version in the form MAJOR.MINOR.PATCH
  #   $2 - part to bump: "major", "minor" or "patch"
  # Prints the incremented version on stdout. An unknown bump type
  # echoes the version unchanged (same as the original behavior).
  local version=$1
  local increment=$2
  local major minor patch
  # One read replaces three echo|cut subprocesses.
  IFS=. read -r major minor patch <<< "$version"
  case "$increment" in
    major)
      major=$((major + 1))
      minor=0
      patch=0
      ;;
    minor)
      minor=$((minor + 1))
      patch=0
      ;;
    patch)
      patch=$((patch + 1))
      ;;
  esac
  echo "${major}.${minor}.${patch}"
}
# Dump commit subject lines (one per line) into $TEMP_FILE_PATH/messages.txt.
#   $1 - "true" limits the log to commits since $LAST_TAG; anything else
#        (or unset) dumps the whole history.
# Returns 1 without writing when the file already has content — appears
# intended as a guard against clobbering an earlier dump (the usual caller,
# get_commit_range, removes the file first, so the guard normally no-ops).
create_file() {
local with_range=$1
if [ -s $TEMP_FILE_PATH/messages.txt ]; then
return 1
fi
if [ "$with_range" == "true" ]; then
git log $LAST_TAG..HEAD --no-decorate --pretty=format:"%s" > $TEMP_FILE_PATH/messages.txt
else
git log --no-decorate --pretty=format:"%s" > $TEMP_FILE_PATH/messages.txt
fi
}
# Refresh $TEMP_FILE_PATH/messages.txt with the commit subjects to scan.
# When LAST_TAG matches the semver PATTERN, only commits since that tag are
# considered; otherwise (no tag yet, or --always returned a hash) the whole
# history is used and LAST_TAG is reset to "0.0.0" as the bump base.
get_commit_range() {
rm -f $TEMP_FILE_PATH/messages.txt
if [[ $LAST_TAG =~ $PATTERN ]]; then
create_file true
else
create_file
LAST_TAG="0.0.0"
fi
# Append a whitespace-only line so the final subject is newline-terminated
# and the `while read` loop in start() consumes it.
echo " " >> $TEMP_FILE_PATH/messages.txt
}
# Main release flow: scan commit subjects since the last tag, decide the
# semver bump level, then regenerate HISTORY.md and VERSION, commit, tag
# and push. Does nothing when no bump-worthy commit is found.
start() {
mkdir -p $TEMP_FILE_PATH
get_commit_range
new_version=$LAST_TAG
increment_type=""
# Priority: major > minor > patch. A major match stops the scan early;
# minor may upgrade an earlier patch; patch only applies when nothing
# stronger was seen yet.
while read message; do
echo $message
# feat/style with '!' after the type/scope => breaking change => major.
if echo $message | grep -Pq '(feat|style)(\([\w]+\))?!:([a-zA-Z0-9-_!\&\.\%\(\)\=\w\s]+)(\s?(,?\s?)((ref(s?):?\s?)(([A-Z0-9]+\-[0-9]+)|(#[0-9]+)|(NOISSUE)))?)'; then
increment_type="major"
# Single-letter debug markers show which branch matched.
echo "a"
break
# Plain feat/style => minor.
elif echo $message | grep -Pq '(feat|style)(\([\w]+\))?:([a-zA-Z0-9-_!\&\.\%\(\)\=\w\s]+)(\s?(,?\s?)((ref(s?):?\s?)(([A-Z0-9]+\-[0-9]+)|(#[0-9]+)|(NOISSUE)))?)'; then
if [ -z "$increment_type" ] || [ "$increment_type" == "patch" ]; then
increment_type="minor"
echo "b"
fi
# Maintenance types => patch.
elif echo $message | grep -Pq '(build|fix|perf|refactor|revert)(\(.+\))?:\s([a-zA-Z0-9-_!\&\.\%\(\)\=\w\s]+)(\s?(,?\s?)((ref(s?):?\s?)(([A-Z0-9]+\-[0-9]+)|(#[0-9]+)|(NOISSUE)))?)'; then
if [ -z "$increment_type" ]; then
increment_type="patch"
echo "c"
fi
fi
done < $TEMP_FILE_PATH/messages.txt
if [ -n "$increment_type" ]; then
new_version=$(increment_version $LAST_TAG $increment_type)
echo "New version: $new_version"
# Regenerate the changelog, dropping the release commits themselves.
gitchangelog | grep -v "[rR]elease:" > HISTORY.md
git add HISTORY.md
echo $new_version > VERSION
git add VERSION
git commit -m "release: version $new_version 🚀"
echo "creating git tag : $new_version"
git tag $new_version
git push -u origin HEAD --tags
echo "Gitea Actions will detect the new tag and release the new version."
else
echo "No changes requiring a version increment."
fi
}
# Run the release flow, then clean up the temporary messages file unless
# the caller asked (via the first script argument) to keep it around.
start
if [ -z "$PREVENT_REMOVE_FILE" ]; then
rm -f $TEMP_FILE_PATH/messages.txt
fi

View File

@@ -0,0 +1,44 @@
#!/bin/sh
colorPrint() {
  # Print $2 in the ANSI color named by $1.
  # Optional flags after the text:
  #   -t  prefix the text with a tab
  #   -n  suppress the trailing newline
  # An unknown color name prints "Invalid color" and returns.
  local color=$1
  local text=$2
  shift 2
  local newline="\n"
  local tab=""
  local arg
  for arg in "$@"
  do
    if [ "$arg" = "-t" ]; then
      tab="\t"
    elif [ "$arg" = "-n" ]; then
      newline=""
    fi
  done
  local color_code
  case $color in
    black) color_code="30" ;;
    red) color_code="31" ;;
    green) color_code="32" ;;
    yellow) color_code="33" ;;
    blue) color_code="34" ;;
    magenta) color_code="35" ;;
    cyan) color_code="36" ;;
    white) color_code="37" ;;
    brightBlack) color_code="90" ;;
    brightRed) color_code="91" ;;
    brightGreen) color_code="92" ;;
    brightYellow) color_code="93" ;;
    brightBlue) color_code="94" ;;
    brightMagenta) color_code="95" ;;
    brightCyan) color_code="96" ;;
    brightWhite) color_code="97" ;;
    *) echo "Invalid color"; return ;;
  esac
  # \033 instead of \e: \e is not in POSIX printf, so dash/ash (this file
  # is sourced under #!/bin/sh) may print it literally. Same bytes in bash.
  printf "\033[${color_code}m${tab}%s\033[0m${newline}" "$text"
}
underline () {
  # Emit $1 underlined: SGR 4 turns underline on, SGR 24 turns it off.
  # No trailing newline, so the result can be embedded mid-line.
  local seq_on seq_off
  seq_on='\033[4m'
  seq_off='\033[24m'
  printf "${seq_on}%s${seq_off}" "$1"
}

4
.gitea/release_message.sh Executable file
View File

@@ -0,0 +1,4 @@
#!/usr/bin/env bash
# generates changelog since last release
# Second-newest tag by creation date; empty when fewer than two tags exist.
previous_tag=$(git tag --sort=-creatordate | sed -n 2p)
if [ -n "$previous_tag" ]; then
  # Commits since the previous tag, indented one space for markdown.
  git shortlog "${previous_tag}.." | sed 's/^./ &/'
else
  # Fewer than two tags: "${previous_tag}.." would expand to the invalid
  # range "..", so fall back to the whole history for the first release.
  git shortlog HEAD | sed 's/^./ &/'
fi

View File

@@ -0,0 +1,61 @@
name: Build Docker image

permissions:
  contents: write

env:
  SKIP_MAKE_SETUP_CHECK: 'true'

on:
  push:
    # Sequence of patterns matched against refs/tags
    tags:
      - '*' # Push events to any tag, e.g. 1.0.0, 20.15.10
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  release:
    name: Create Release
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v5
        with:
          # by default, it uses a depth of 1
          # this fetches all history so that we can read each commit
          fetch-depth: 0
      - name: Generate Changelog
        run: .gitea/release_message.sh > release_message.md
      - name: Release
        uses: softprops/action-gh-release@v1
        with:
          body_path: release_message.md

  deploy:
    needs: release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - name: Check version match
        run: |
          # The tag that triggered this run must match the VERSION file.
          if [ "$(cat VERSION)" = "${GITHUB_REF_NAME}" ] ; then
            echo "Version matches successfully!"
          else
            echo "Version must match!"
            # exit -1 is not a valid POSIX exit status; use 1.
            exit 1
          fi
      - name: Login to Gitea container registry
        uses: docker/login-action@v3
        with:
          username: gitearobot
          password: ${{ secrets.PACKAGE_GITEA_PAT }}
          registry: git.disi.dev
      - name: Build and publish
        run: |
          # Image name is the lowercased repo owner plus the fixed
          # "unraid-mcp" image, tagged with the VERSION file contents.
          REPOSITORY_OWNER=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $1}' | tr '[:upper:]' '[:lower:]')
          docker build -t "git.disi.dev/$REPOSITORY_OWNER/unraid-mcp:$(cat VERSION)" ./
          docker push "git.disi.dev/$REPOSITORY_OWNER/unraid-mcp:$(cat VERSION)"

2
.gitignore vendored
View File

@@ -57,3 +57,5 @@ client_secret_*.apps.googleusercontent.com.json
web-ui/frontend/node_modules web-ui/frontend/node_modules
web-ui/backend/.venv-backend/ web-ui/backend/.venv-backend/
.pnpm-store/ .pnpm-store/
.continue

View File

@@ -84,17 +84,16 @@ docker compose down
- **Health Monitoring**: Comprehensive health check tool for system monitoring - **Health Monitoring**: Comprehensive health check tool for system monitoring
- **Real-time Subscriptions**: WebSocket-based live data streaming - **Real-time Subscriptions**: WebSocket-based live data streaming
### Tool Categories (10 Tools, 76 Actions) ### Tool Categories (9 Tools, 69 Actions)
1. **`unraid_info`** (19 actions): overview, array, network, registration, connect, variables, metrics, services, display, config, online, owner, settings, server, servers, flash, ups_devices, ups_device, ups_config 1. **`unraid_info`** (18 actions): overview, array, network, registration, connect, variables, metrics, services, display, config, online, owner, settings, server, servers, flash, ups_devices, ups_device
2. **`unraid_array`** (5 actions): parity_start, parity_pause, parity_resume, parity_cancel, parity_status 2. **`unraid_storage`** (5 actions): shares, disks, disk_details, log_files, logs
3. **`unraid_storage`** (6 actions): shares, disks, disk_details, unassigned, log_files, logs 3. **`unraid_docker`** (15 actions): list, details, start, stop, restart, pause, unpause, remove, update, update_all, logs, networks, network_details, port_conflicts, check_updates
4. **`unraid_docker`** (15 actions): list, details, start, stop, restart, pause, unpause, remove, update, update_all, logs, networks, network_details, port_conflicts, check_updates 4. **`unraid_vm`** (9 actions): list, details, start, stop, pause, resume, force_stop, reboot, reset
5. **`unraid_vm`** (9 actions): list, details, start, stop, pause, resume, force_stop, reboot, reset 5. **`unraid_notifications`** (9 actions): overview, list, warnings, create, archive, unread, delete, delete_archived, archive_all
6. **`unraid_notifications`** (9 actions): overview, list, warnings, create, archive, unread, delete, delete_archived, archive_all 6. **`unraid_rclone`** (4 actions): list_remotes, config_form, create_remote, delete_remote
7. **`unraid_rclone`** (4 actions): list_remotes, config_form, create_remote, delete_remote 7. **`unraid_users`** (1 action): me
8. **`unraid_users`** (1 action): me 8. **`unraid_keys`** (5 actions): list, get, create, update, delete
9. **`unraid_keys`** (5 actions): list, get, create, update, delete 9. **`unraid_health`** (3 actions): check, test_connection, diagnose
10. **`unraid_health`** (3 actions): check, test_connection, diagnose
### Environment Variable Hierarchy ### Environment Variable Hierarchy
The server loads environment variables from multiple locations in order: The server loads environment variables from multiple locations in order:

View File

@@ -11,6 +11,7 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /usr/local/bin/
COPY pyproject.toml . COPY pyproject.toml .
COPY uv.lock . COPY uv.lock .
COPY README.md . COPY README.md .
COPY LICENSE .
# Copy the source code # Copy the source code
COPY unraid_mcp/ ./unraid_mcp/ COPY unraid_mcp/ ./unraid_mcp/

504
HISTORY.md Normal file
View File

@@ -0,0 +1,504 @@
Changelog
=========
(unreleased)
------------
Fix
~~~
- Even more schema apatations. [Simon Diesenreiter]
0.0.5 (2026-02-28)
------------------
Fix
~~~
- Runtime error due to broken import. [Simon Diesenreiter]
Other
~~~~~
0.0.4 (2026-02-28)
------------------
Fix
~~~
- Even more changes to accommodate older GraphQL schema. [Simon
Diesenreiter]
Other
~~~~~
0.0.3 (2026-02-28)
------------------
Fix
~~~
- Adapt for supported GraphQL schema on 6.12.13. [Simon Diesenreiter]
Other
~~~~~
0.0.2 (2026-02-21)
------------------
Fix
~~~
- Set scripts as executable, refs NOISSUE. [Simon Diesenreiter]
- Initial test release. [Simon Diesenreiter]
Other
~~~~~
0.0.1 (2026-02-21)
------------------
Fix
~~~
- Use CLAUDE_PLUGIN_ROOT for portable MCP server configuration. [Jacob
Magar]
Update .mcp.json to use environment variable
for the --directory argument, ensuring the MCP server works correctly
regardless of where the plugin is installed.
This follows Claude Code plugin best practices for MCP server bundling.
- Correct marketplace.json source field format. [Jacob Magar]
Change source from absolute GitHub URL to relative path "./"
This follows Claude Code marketplace convention where source paths
are relative to the cloned repository root, not external URLs.
Matches pattern from working examples like claude-homelab marketplace.
- Upgrade fastmcp and mcp to resolve remaining security vulnerabilities.
[Claude]
Security Updates:
- fastmcp 2.12.5 → 2.14.5 (fixes CVE-2025-66416, command injection, XSS, auth takeover)
- mcp 1.16.0 → 1.26.0 (enables DNS rebinding protection, addresses CVE requirements)
- websockets 13.1 → 16.0 (required dependency for fastmcp 2.14.5)
Dependency Changes:
+ beartype 0.22.9
+ cachetools 7.0.1
+ cloudpickle 3.1.2
+ croniter 6.0.0
+ diskcache 5.6.3
+ fakeredis 2.34.0
+ importlib-metadata 8.7.1
+ jsonref 1.1.1
+ lupa 2.6
+ opentelemetry-api 1.39.1
+ pathvalidate 3.3.1
+ platformdirs 4.9.2
+ prometheus-client 0.24.1
+ py-key-value-aio 0.3.0
+ py-key-value-shared 0.3.0
+ pydocket 0.17.7
+ pyjwt 2.11.0
+ python-dateutil 2.9.0.post0
+ python-json-logger 4.0.0
+ redis 7.2.0
+ shellingham 1.5.4
+ sortedcontainers 2.4.0
+ typer 0.23.2
+ zipp 3.23.0
Removed Dependencies:
- isodate 0.7.2
- lazy-object-proxy 1.12.0
- markupsafe 3.0.3
- openapi-core 0.22.0
- openapi-schema-validator 0.6.3
- openapi-spec-validator 0.7.2
- rfc3339-validator 0.1.4
- werkzeug 3.1.5
Testing:
- All 493 tests pass
- Type checking passes (ty check)
- Linting passes (ruff check)
This completes the resolution of GitHub Dependabot security alerts.
Addresses the remaining 5 high/medium severity vulnerabilities in fastmcp and mcp packages.
- Correct marketplace.json source field and improve async operations.
[Jacob Magar]
- Fix marketplace.json: change source from relative path to GitHub URL
(was "skills/unraid", now "https://github.com/jmagar/unraid-mcp")
This resolves the "Invalid input" schema validation error when adding
the marketplace to Claude Code
- Refactor subscriptions autostart to use anyio.Path for async file checks
(replaces blocking pathlib.Path.exists() with async anyio.Path.exists())
- Update dependencies: anyio 4.11.0→4.12.1, attrs 25.3.0→25.4.0
- Correct marketplace.json format for Claude Code compatibility.
[Claude]
- Rename marketplace from "unraid-mcp" to "jmagar-unraid-mcp" to match expected directory structure
- Wrap description, version, homepage, and repository in metadata object per standard format
- Fixes "Marketplace file not found" error when adding marketplace to Claude Code
Resolves marketplace installation issues by aligning with format used by other Claude Code marketplaces.
- Address PR comment #38 - remove duplicate User-Agent header. [Jacob
Magar]
Resolves review thread PRRT_kwDOO6Hdxs5uu7z7
- Removed redundant User-Agent header from per-request headers in make_graphql_request()
- User-Agent is already set as default header on the shared HTTP client
- httpx merges per-request headers with client defaults, so client-level default is sufficient
- Harden read-logs.sh against GraphQL injection and path traversal.
[Jacob Magar]
- Remove slashes from LOG_NAME regex to block path traversal (e.g.
../../etc/passwd). Only alphanumeric, dots, hyphens, underscores allowed.
- Cap LINES to 1-10000 range to prevent resource exhaustion.
- Add query script existence check before execution.
- Add query failure, empty response, and invalid JSON guards.
Resolves review thread PRRT_kwDOO6Hdxs5uvKrj
- Address 5 critical and major PR review issues. [Jacob Magar]
- Remove set -e from validate-marketplace.sh to prevent early exit on
check failures, allowing the summary to always be displayed (PRRT_kwDOO6Hdxs5uvKrc)
- Fix marketplace.json source path to point to skills/unraid instead of
./ for correct plugin directory resolution (PRRT_kwDOO6Hdxs5uvKrg)
- Fix misleading trap registration comment in unraid-api-crawl.md and
add auth note to Apollo Studio URL (PRRT_kwDOO6Hdxs5uvO2t)
- Extract duplicated cleanup-with-error-handling in main.py into
_run_shutdown_cleanup() helper (PRRT_kwDOO6Hdxs5uvO3A)
- Add input validation to read-logs.sh to prevent GraphQL injection
via LOG_NAME and LINES parameters (PRRT_kwDOO6Hdxs5uvKrj)
- Address PR review comments on test suite. [Jacob Magar]
- Rename test_start_http_401_unauthorized to test_list_http_401_unauthorized
to match the actual action="list" being tested (threads #19, #23)
- Use consistent PrefixedID format ("a"*64+":local") in test_start_container
instead of "abc123def456"*4 concatenation (thread #37)
- Refactor container_actions_require_id to use @pytest.mark.parametrize
so each action runs independently (thread #18)
- Fix docstring claiming ToolError for test that asserts success in
test_stop_mutation_returns_null (thread #26)
- Fix inaccurate comment about `in` operator checking truthiness;
it checks key existence (thread #25)
- Add edge case tests for temperature=0, temperature=null, and
logFile=null in test_storage.py (thread #31)
Resolves review threads: PRRT_kwDOO6Hdxs5uvO2-, PRRT_kwDOO6Hdxs5uvOcf,
PRRT_kwDOO6Hdxs5uu7zx, PRRT_kwDOO6Hdxs5uvO28, PRRT_kwDOO6Hdxs5uvOcp,
PRRT_kwDOO6Hdxs5uvOcn, PRRT_kwDOO6Hdxs5uvKr3
- Harden shell scripts with error handling and null guards. [Jacob
Magar]
- dashboard.sh: Add // [] jq null guard on .data.array.disks[] (L176-177)
Resolves review thread PRRT_kwDOO6Hdxs5uvO21
- dashboard.sh: Default NAME to server key when env var unset (L221)
Resolves review thread PRRT_kwDOO6Hdxs5uvO22
- unraid-query.sh: Check curl exit code, empty response, and JSON validity
before piping to jq (L112-129)
Resolves review thread PRRT_kwDOO6Hdxs5uvO24
- disk-health.sh: Guard against missing query script and invalid responses
Resolves review thread PRRT_kwDOO6Hdxs5uvKrh
- Address 54 MEDIUM/LOW priority PR review issues. [Jacob Magar]
Comprehensive fixes across Python code, shell scripts, and documentation
addressing all remaining MEDIUM and LOW priority review comments.
Python Code Fixes (27 fixes):
- tools/info.py: Simplified dispatch with lookup tables, defensive guards,
CPU fallback formatting, !s conversion flags, module-level sync assertion
- tools/docker.py: Case-insensitive container ID regex, keyword-only confirm,
module-level ALL_ACTIONS constant
- tools/virtualization.py: Normalized single-VM dict responses, unified
list/details queries
- core/client.py: Fixed HTTP client singleton race condition, compound key
substring matching for sensitive data redaction
- subscriptions/: Extracted SSL context creation to shared helper in utils.py,
replaced deprecated ssl._create_unverified_context API
- tools/array.py: Renamed parity_history to parity_status, hoisted ALL_ACTIONS
- tools/storage.py: Fixed dict(None) risks, temperature 0 falsiness bug
- tools/notifications.py, keys.py, rclone.py: Fixed dict(None) TypeError risks
- tests/: Fixed generator type annotations, added coverage for compound keys
Shell Script Fixes (13 fixes):
- dashboard.sh: Dynamic server discovery, conditional debug output, null-safe
jq, notification count guard order, removed unused variables
- unraid-query.sh: Proper JSON escaping via jq, --ignore-errors and --insecure
CLI flags, TLS verification now on by default
- validate-marketplace.sh: Removed unused YELLOW variable, defensive jq,
simplified repository URL output
Documentation Fixes (24+ fixes):
- Version consistency: Updated all references to v0.2.0 across pyproject.toml,
plugin.json, marketplace.json, MARKETPLACE.md, __init__.py, README files
- Tool count updates: Changed all "26 tools" references to "10 tools, 90 actions"
- Markdown lint: Fixed MD022, MD031, MD047 issues across multiple files
- Research docs: Fixed auth headers, removed web artifacts, corrected stale info
- Skills docs: Fixed query examples, endpoint counts, env var references
All 227 tests pass, ruff and ty checks clean.
- Update Subprotocol import and SSL handling in WebSocket modules.
[Jacob Magar]
- Change Subprotocol import from deprecated websockets.legacy.protocol
to websockets.typing (canonical location in websockets 13.x)
- Fix SSL context handling in diagnostics.py to properly build
ssl.SSLContext objects, matching the pattern in manager.py
(previously passed UNRAID_VERIFY_SSL directly which breaks
when it's a CA bundle path string)
- Resolve ruff lint issues in storage tool and tests. [Jacob Magar]
- Move _ALLOWED_LOG_PREFIXES to module level (N806: constant naming)
- Use f-string conversion flag {e!s} instead of {str(e)} (RUF010)
- Fix import block sorting in both files (I001)
- Address 18 CRITICAL+HIGH PR review comments. [Jacob Magar, config-
fixer, docker-fixer, info-fixer, keys-rclone-fixer, storage-fixer,
users-fixer, vm-fixer, websocket-fixer]
**Critical Fixes (7 issues):**
- Fix GraphQL schema field names in users tool (role→roles, remove email)
- Fix GraphQL mutation signatures (addUserInput, deleteUser input)
- Fix dict(None) TypeError guards in users tool (use `or {}` pattern)
- Fix FastAPI version constraint (0.116.1→0.115.0)
- Fix WebSocket SSL context handling (support CA bundles, bool, and None)
- Fix critical disk threshold treated as warning (split counters)
**High Priority Fixes (11 issues):**
- Fix Docker update/remove action response field mapping
- Fix path traversal vulnerability in log validation (normalize paths)
- Fix deleteApiKeys validation (check response before success)
- Fix rclone create_remote validation (check response)
- Fix keys input_data type annotation (dict[str, Any])
- Fix VM domain/domains fallback restoration
**Changes by file:**
- unraid_mcp/tools/docker.py: Response field mapping
- unraid_mcp/tools/info.py: Split critical/warning counters
- unraid_mcp/tools/storage.py: Path normalization for traversal protection
- unraid_mcp/tools/users.py: GraphQL schema + null handling
- unraid_mcp/tools/keys.py: Validation + type annotations
- unraid_mcp/tools/rclone.py: Response validation
- unraid_mcp/tools/virtualization.py: Domain fallback
- unraid_mcp/subscriptions/manager.py: SSL context creation
- pyproject.toml: FastAPI version fix
- tests/*: New tests for all fixes
**Review threads resolved:**
PRRT_kwDOO6Hdxs5uu70L, PRRT_kwDOO6Hdxs5uu70O, PRRT_kwDOO6Hdxs5uu70V,
PRRT_kwDOO6Hdxs5uu70e, PRRT_kwDOO6Hdxs5uu70i, PRRT_kwDOO6Hdxs5uu7zn,
PRRT_kwDOO6Hdxs5uu7z_, PRRT_kwDOO6Hdxs5uu7sI, PRRT_kwDOO6Hdxs5uu7sJ,
PRRT_kwDOO6Hdxs5uu7sK, PRRT_kwDOO6Hdxs5uu7Tk, PRRT_kwDOO6Hdxs5uu7Tn,
PRRT_kwDOO6Hdxs5uu7Tr, PRRT_kwDOO6Hdxs5uu7Ts, PRRT_kwDOO6Hdxs5uu7Tu,
PRRT_kwDOO6Hdxs5uu7Tv, PRRT_kwDOO6Hdxs5uu7Tw, PRRT_kwDOO6Hdxs5uu7Tx
All tests passing.
- Add type annotation to resolve mypy Literal narrowing error. [Jacob
Magar]
Other
~~~~~
- Ci: add Homelab CI framework. [Simon Diesenreiter]
- Refactor: move MCP server config inline to plugin.json. [Jacob Magar]
Move MCP server configuration from standalone .mcp.json to inline
definition in plugin.json. This consolidates all plugin metadata
in a single location.
- Add type: stdio and env fields to inline config
- Remove redundant .mcp.json file
- Maintains same functionality with cleaner structure
- Feat: add MCP server configuration for Claude Code plugin integration.
[Jacob Magar]
Add .mcp.json to configure the Unraid MCP server as a stdio-based MCP
server for Claude Code plugin integration. This allows Claude Code to
automatically start and connect to the server when the plugin is loaded.
- Type: stdio (standard input/output communication)
- Command: uv run unraid-mcp-server
- Forces stdio transport mode via UNRAID_MCP_TRANSPORT env var
- Docs: fix markdown lint, broken links, stale counts, and publishing
guidance. [Jacob Magar]
- Fix broken ToC anchors in competitive-analysis.md (MD051)
- Add blank lines before code blocks in api-reference.md (MD031)
- Add language identifiers to directory tree code blocks in MARKETPLACE.md and skills/unraid/README.md (MD040)
- Fix size unit guidance conflict: clarify disk sizes are KB, memory is bytes
- Update stale "90 actions" references to "76 actions" across research docs
- Fix coverage table terminology and clarify 22% coverage calculation
- Recommend PyPI Trusted Publishing (OIDC) over API token secrets in PUBLISHING.md
- Update action count in .claude-plugin/README.md
Resolves review threads: PRRT_kwDOO6Hdxs5uvO2m, PRRT_kwDOO6Hdxs5uvO2o,
PRRT_kwDOO6Hdxs5uvO2r, PRRT_kwDOO6Hdxs5uvOcl, PRRT_kwDOO6Hdxs5uvOcr,
PRRT_kwDOO6Hdxs5uvKrq, PRRT_kwDOO6Hdxs5uvO2u, PRRT_kwDOO6Hdxs5uvO2w,
PRRT_kwDOO6Hdxs5uvO2z, PRRT_kwDOO6Hdxs5uu7zl
- Feat: enhance test suite with 275 new tests across 4 validation
categories. [Claude, Jacob Magar]
Add comprehensive test coverage beyond unit tests:
- Schema validation (93 tests): Validate all GraphQL queries/mutations against extracted Unraid API schema
- HTTP layer (88 tests): Test request construction, timeouts, and error handling at httpx level
- Subscriptions (55 tests): WebSocket lifecycle, reconnection, and protocol validation
- Safety audit (39 tests): Enforce destructive action confirmation requirements
Total test count increased from 210 to 485 (130% increase), all passing in 5.91s.
New dependencies:
- graphql-core>=3.2.0 for schema validation
- respx>=0.22.0 for HTTP layer mocking
Files created:
- docs/unraid-schema.graphql (150-type GraphQL schema)
- tests/schema/test_query_validation.py
- tests/http_layer/test_request_construction.py
- tests/integration/test_subscriptions.py
- tests/safety/test_destructive_guards.py
- Feat: harden API safety and expand command docs with full test
coverage. [Jacob Magar]
- Refactor: move plugin manifest to repository root per Claude Code best
practices. [Jacob Magar]
- Move plugin.json from skills/unraid/.claude-plugin/ to .claude-plugin/
- Update validation script to use correct plugin manifest path
- Add plugin structure section to root README.md
- Add installation instructions to skills/unraid/README.md
- Aligns with Claude Code's expectation for source: './' in marketplace.json
- Chore: enhance project metadata, tooling, and documentation. [Claude,
Jacob Magar]
**Project Configuration:**
- Enhance pyproject.toml with comprehensive metadata, keywords, and classifiers
- Add LICENSE file (MIT) for proper open-source distribution
- Add PUBLISHING.md with comprehensive publishing guidelines
- Update .gitignore to exclude tool artifacts (.cache, .pytest_cache, .ruff_cache, .ty_cache)
- Ignore documentation working directories (.docs, .full-review, docs/plans, docs/sessions)
**Documentation:**
- Add extensive Unraid API research documentation
- API source code analysis and resolver mapping
- Competitive analysis and feature gap assessment
- Release notes analysis (7.0.0, 7.1.0, 7.2.0)
- Connect platform overview and remote access documentation
- Document known API patterns, limitations, and edge cases
**Testing & Code Quality:**
- Expand test coverage across all tool modules
- Add destructive action confirmation tests
- Improve test assertions and error case validation
- Refine type annotations for better static analysis
**Tool Improvements:**
- Enhance error handling consistency across all tools
- Improve type safety with explicit type annotations
- Refine GraphQL query construction patterns
- Better handling of optional parameters and edge cases
This commit prepares the project for v0.2.0 release with improved
metadata, comprehensive documentation, and enhanced code quality.
- Feat: consolidate 26 tools into 10 tools with 90 actions. [Jacob
Magar]
Refactor the entire tool layer to use the consolidated action pattern
(action: Literal[...] with QUERIES/MUTATIONS dicts). This reduces LLM
context from ~12k to ~5k tokens while adding ~60 new API capabilities.
New tools: unraid_info (19 actions), unraid_array (12), unraid_notifications (9),
unraid_users (8), unraid_keys (5). Rewritten: unraid_docker (15), unraid_vm (9),
unraid_storage (6), unraid_rclone (4), unraid_health (3).
Includes 129 tests across 10 test files, code review fixes for 16 issues
(severity ordering, PrefixedID regex, sensitive var redaction, etc.).
Removes tools/system.py (replaced by tools/info.py). Version bumped to 0.2.0.
- Chore: update .gitignore. [Jacob Magar]
- Move pid and log files to /tmp directory. [Claude, Jacob Magar]
- Update dev.sh to use /tmp for LOG_DIR instead of PROJECT_DIR/logs
- Update settings.py to use /tmp for LOGS_DIR instead of PROJECT_ROOT/logs
- This change moves both pid files and log files to the temporary directory
🤖 Generated with [Claude Code](https://claude.ai/code)
- Remove env_file from docker-compose and use explicit environment
variables. [Claude, Jacob Magar]
- Remove env_file directive from docker-compose.yml to eliminate .env file dependency
- Add explicit environment variable declarations with default values using ${VAR:-default} syntax
- Update port mapping to use UNRAID_MCP_PORT environment variable for both host and container
- Include all 11 environment variables used by the application with proper defaults
- Update README.md Docker deployment instructions to use export commands instead of .env files
- Update manual Docker run command to use -e flags instead of --env-file
This makes Docker deployment self-contained and follows container best practices.
🤖 Generated with [Claude Code](https://claude.ai/code)
- Replace log rotation with 10MB overwrite behavior. [Claude, Jacob
Magar]
- Create OverwriteFileHandler class that caps log files at 10MB and overwrites instead of rotating
- Remove RotatingFileHandler dependency and backup file creation
- Add reset marker logging when file limit is reached for troubleshooting
- Update all logger configurations (main, FastMCP, and root loggers)
- Increase file size limit from 5MB to 10MB as requested
- Maintain existing Rich console formatting and error handling
🤖 Generated with [Claude Code](https://claude.ai/code)
- Align documentation and Docker configuration with current
implementation. [Claude, Jacob Magar]
- Fix README.md: Make Docker deployment recommended, remove duplicate installation section
- Fix Dockerfile: Copy correct source files (unraid_mcp/, uv.lock, README.md) instead of non-existent unraid_mcp_server.py
- Update docker-compose.yml: Enable build configuration and use .env instead of .env.local
- Add missing environment variables to .env.example and .env: UNRAID_AUTO_START_SUBSCRIPTIONS, UNRAID_MAX_RECONNECT_ATTEMPTS
- Fix CLAUDE.md: Correct environment hierarchy documentation (../env.local → ../.env.local)
- Remove unused unraid-schema.json file
🤖 Generated with [Claude Code](https://claude.ai/code)
- Lintfree. [Jacob Magar]
- Add Claude Code agent configuration and GraphQL introspection.
[Claude, Jacob Magar]
- Added KFC (Kent Feature Creator) spec workflow agents for requirements, design, tasks, testing, implementation and evaluation
- Added Claude Code settings configuration for agent workflows
- Added GraphQL introspection query and schema files for Unraid API exploration
- Updated development script with additional debugging and schema inspection capabilities
- Enhanced logging configuration with structured formatting
- Updated pyproject.toml dependencies and uv.lock
🤖 Generated with [Claude Code](https://claude.ai/code)
- Remove unused MCP resources and update documentation. [Claude, Jacob
Magar]
- Remove array_status, system_info, notifications_overview, and parity_status resources
- Keep only logs_stream resource (unraid://logs/stream) which is working properly
- Update README.md with current resource documentation and modern docker compose syntax
- Fix import path issues that were causing subscription errors
- Update environment configuration examples
- Clean up subscription manager to only include working log streaming
🤖 Generated with [Claude Code](https://claude.ai/code)
- Migrate to uv and FastMCP architecture with comprehensive tooling.
[Claude, Jacob Magar]
- Replace pip/requirements.txt with uv and pyproject.toml
- Restructure as single-file MCP server using FastMCP
- Add comprehensive Unraid management tools (containers, VMs, storage, logs)
- Implement multiple transport support (streamable-http, SSE, stdio)
- Add robust error handling and timeout management
- Include project documentation and API feature tracking
- Remove outdated cline documentation structure
🤖 Generated with [Claude Code](https://claude.ai/code)
- Update docker-compose.yml. [Jacob Magar]

26
Makefile Normal file
View File

@@ -0,0 +1,26 @@
.ONESHELL:

# Version comes from the VERSION file unless overridden: `make build VERSION=1.2.3`.
VERSION ?= $(shell cat ./VERSION)

.PHONY: issetup
issetup:
	# NOTE: `$$` is required so make passes a literal `$` to the shell; a single
	# `$S` would be expanded (to empty) by make itself and break the check.
	# SKIP_MAKE_SETUP_CHECK=true bypasses the hook check (e.g. in CI).
	@[ -f .git/hooks/commit-msg ] || [ "$${SKIP_MAKE_SETUP_CHECK:-}" = "true" ] || (echo "You must run 'make setup' first to initialize the repo!" && exit 1)

.PHONY: setup
setup: ## Install the conventional-commit git hook.
	@cp .gitea/conventional_commits/commit-msg .git/hooks/
	@chmod +x .git/hooks/commit-msg

.PHONY: help
help: ## Show the help.
	@echo "Usage: make <target>"
	@echo ""
	@echo "Targets:"
	@grep -F "##" Makefile | grep -Fv grep

.PHONY: release
release: issetup ## Create a new tag for release.
	@./.gitea/conventional_commits/generate-version.sh

.PHONY: build
build: issetup ## Build the Docker image tagged with the current version.
	@docker build -t unraid-mcp:$(VERSION) .

3
NOTICE.md Normal file
View File

@@ -0,0 +1,3 @@
# Notice
This is a fork of an externally maintained repository. Only intended for internal use in HomeLab!

View File

@@ -218,13 +218,12 @@ UNRAID_VERIFY_SSL=true # true, false, or path to CA bundle
Each tool uses a consolidated `action` parameter to expose multiple operations, reducing context window usage. Destructive actions require `confirm=True`. Each tool uses a consolidated `action` parameter to expose multiple operations, reducing context window usage. Destructive actions require `confirm=True`.
### Tool Categories (10 Tools, 76 Actions) ### Tool Categories (9 Tools, 69 Actions)
| Tool | Actions | Description | | Tool | Actions | Description |
|------|---------|-------------| |------|---------|-------------|
| **`unraid_info`** | 19 | overview, array, network, registration, connect, variables, metrics, services, display, config, online, owner, settings, server, servers, flash, ups_devices, ups_device, ups_config | | **`unraid_info`** | 19 | overview, array, network, registration, connect, variables, metrics, services, display, config, online, owner, settings, server, servers, flash, ups_devices, ups_device |
| **`unraid_array`** | 5 | parity_start, parity_pause, parity_resume, parity_cancel, parity_status | | **`unraid_storage`** | 6 | shares, disks, disk_details, log_files, logs |
| **`unraid_storage`** | 6 | shares, disks, disk_details, unassigned, log_files, logs |
| **`unraid_docker`** | 15 | list, details, start, stop, restart, pause, unpause, remove, update, update_all, logs, networks, network_details, port_conflicts, check_updates | | **`unraid_docker`** | 15 | list, details, start, stop, restart, pause, unpause, remove, update, update_all, logs, networks, network_details, port_conflicts, check_updates |
| **`unraid_vm`** | 9 | list, details, start, stop, pause, resume, force_stop, reboot, reset | | **`unraid_vm`** | 9 | list, details, start, stop, pause, resume, force_stop, reboot, reset |
| **`unraid_notifications`** | 9 | overview, list, warnings, create, archive, unread, delete, delete_archived, archive_all | | **`unraid_notifications`** | 9 | overview, list, warnings, create, archive, unread, delete, delete_archived, archive_all |
@@ -242,14 +241,13 @@ Each tool uses a consolidated `action` parameter to expose multiple operations,
## 💬 Custom Slash Commands ## 💬 Custom Slash Commands
The project includes **10 custom slash commands** in `commands/` for quick access to Unraid operations: The project includes **9 custom slash commands** in `commands/` for quick access to Unraid operations:
### Available Commands ### Available Commands
| Command | Actions | Quick Access | | Command | Actions | Quick Access |
|---------|---------|--------------| |---------|---------|--------------|
| `/info` | 19 | System information, metrics, configuration | | `/info` | 19 | System information, metrics, configuration |
| `/array` | 5 | Parity check management |
| `/storage` | 6 | Shares, disks, logs | | `/storage` | 6 | Shares, disks, logs |
| `/docker` | 15 | Container management and monitoring | | `/docker` | 15 | Container management and monitoring |
| `/vm` | 9 | Virtual machine lifecycle | | `/vm` | 9 | Virtual machine lifecycle |

1
VERSION Normal file
View File

@@ -0,0 +1 @@
0.0.6

View File

@@ -1,30 +0,0 @@
---
description: Manage Unraid array parity checks
argument-hint: [action] [correct=true/false]
---
Execute the `unraid_array` MCP tool with action: `$1`
## Available Actions (5)
**Parity Check Operations:**
- `parity_start` - Start parity check/sync (optional: correct=true to fix errors)
- `parity_pause` - Pause running parity operation
- `parity_resume` - Resume paused parity operation
- `parity_cancel` - Cancel running parity operation
- `parity_status` - Get current parity check status
## Example Usage
```
/array parity_start
/array parity_start correct=true
/array parity_pause
/array parity_resume
/array parity_cancel
/array parity_status
```
**Note:** Use `correct=true` with `parity_start` to automatically fix any parity errors found during the check.
Use the tool to execute the requested parity operation and report the results.

View File

@@ -18,7 +18,6 @@ Execute the `unraid_info` MCP tool with action: `$1`
**Network & Registration:** **Network & Registration:**
- `network` - Network configuration and interfaces - `network` - Network configuration and interfaces
- `registration` - Registration status and license info - `registration` - Registration status and license info
- `connect` - Connect service configuration
- `online` - Online status check - `online` - Online status check
**Configuration:** **Configuration:**
@@ -32,7 +31,6 @@ Execute the `unraid_info` MCP tool with action: `$1`
- `metrics` - System metrics (CPU, RAM, disk I/O) - `metrics` - System metrics (CPU, RAM, disk I/O)
- `ups_devices` - List all UPS devices - `ups_devices` - List all UPS devices
- `ups_device` - Get specific UPS device details (requires device_id) - `ups_device` - Get specific UPS device details (requires device_id)
- `ups_config` - UPS configuration
**Ownership:** **Ownership:**
- `owner` - Server owner information - `owner` - Server owner information

View File

@@ -11,7 +11,6 @@ Execute the `unraid_storage` MCP tool with action: `$1`
- `shares` - List all user shares with sizes and allocation - `shares` - List all user shares with sizes and allocation
- `disks` - List all disks in the array - `disks` - List all disks in the array
- `disk_details` - Get detailed info for a specific disk (requires disk identifier) - `disk_details` - Get detailed info for a specific disk (requires disk identifier)
- `unassigned` - List unassigned devices
**Logs:** **Logs:**
- `log_files` - List available system log files - `log_files` - List available system log files
@@ -23,7 +22,6 @@ Execute the `unraid_storage` MCP tool with action: `$1`
/unraid-storage shares /unraid-storage shares
/unraid-storage disks /unraid-storage disks
/unraid-storage disk_details disk1 /unraid-storage disk_details disk1
/unraid-storage unassigned
/unraid-storage log_files /unraid-storage log_files
/unraid-storage logs /var/log/syslog /unraid-storage logs /var/log/syslog
``` ```

View File

@@ -541,8 +541,6 @@ Possible error states for configuration
- Fields (3): - Fields (3):
- `api`: `String` - `api`: `String`
- Unraid API version - Unraid API version
- `kernel`: `String`
- Kernel version
- `unraid`: `String` - `unraid`: `String`
- Unraid version - Unraid version
@@ -1021,8 +1019,6 @@ The `ID` scalar type represents a unique identifier, often used to refetch an ob
- `hostname`: `String` - `hostname`: `String`
- Hostname - Hostname
- `id`: `PrefixedID!` - `id`: `PrefixedID!`
- `kernel`: `String`
- Kernel version
- `logofile`: `String` - `logofile`: `String`
- OS logo name - OS logo name
- `platform`: `String` - `platform`: `String`
@@ -1426,8 +1422,6 @@ System metrics including CPU and memory utilization
- Node.js version - Node.js version
- `npm`: `String` - `npm`: `String`
- npm version - npm version
- `openssl`: `String`
- OpenSSL version
- `php`: `String` - `php`: `String`
- PHP version - PHP version
- `pm2`: `String` - `pm2`: `String`

View File

@@ -355,7 +355,6 @@ The project's documentation explicitly compares SSH vs API capabilities:
| Network config | Y | Y | Y | Y | N | N | N | | Network config | Y | Y | Y | Y | N | N | N |
| Network bandwidth | N | Y | N | Y | N | N | N | | Network bandwidth | N | Y | N | Y | N | N | N |
| Registration/license info | Y | Y | Y | N | N | N | N | | Registration/license info | Y | Y | Y | N | N | N | N |
| Connect settings | Y | Y | Y | N | N | N | N |
| Unraid variables | Y | Y | Y | N | N | N | N | | Unraid variables | Y | Y | Y | N | N | N | N |
| System services status | N | Y | Y | N | N | N | N | | System services status | N | Y | Y | N | N | N | N |
| Flash drive info | N | Y | Y | N | N | Y | N | | Flash drive info | N | Y | Y | N | N | Y | N |

View File

@@ -27,7 +27,7 @@ Every query type identified across all research documents, with their fields and
| Query | Fields | Current MCP Coverage | | Query | Fields | Current MCP Coverage |
|-------|--------|---------------------| |-------|--------|---------------------|
| `info` | `time`, `baseboard { manufacturer, model, version, serial }`, `cpu { manufacturer, brand, vendor, family, model, stepping, revision, voltage, speed, speedmin, speedmax, threads, cores, processors, socket, cache, flags }`, `devices`, `display`, `machineId`, `memory { max, total, free, used, active, available, buffcache, swaptotal, swapused, swapfree, layout[] }`, `os { platform, distro, release, codename, kernel, arch, hostname, codepage, logofile, serial, build, uptime }`, `system { manufacturer, model, version, serial, uuid }`, `versions { kernel, docker, unraid, node }`, `apps { installed, started }` | **YES** - `get_system_info()` | | `info` | `time`, `baseboard { manufacturer, model, version, serial }`, `cpu { manufacturer, brand, vendor, family, model, stepping, revision, voltage, speed, speedmin, speedmax, threads, cores, processors, socket, cache, flags }`, `devices`, `display`, `machineId`, `memory { max, total, free, used, active, available, buffcache, swaptotal, swapused, swapfree, layout[] }`, `os { platform, distro, release, codename, arch, hostname, logofile, serial, build, uptime }`, `system { manufacturer, model, version, serial, uuid }`, `versions { docker, unraid, node }` | **YES** - `get_system_info()` |
| `vars` | `id`, `version`, `name`, `timeZone`, `comment`, `security`, `workgroup`, `domain`, `useNtp`, `ntpServer1-4`, `useSsl`, `port`, `portssl`, `useTelnet`, `useSsh`, `portssh`, `startPage`, `startArray`, `spindownDelay`, `defaultFormat`, `defaultFsType`, `shutdownTimeout`, `shareDisk`, `shareUser`, `shareSmbEnabled`, `shareNfsEnabled`, `shareAfpEnabled`, `shareCacheEnabled`, `shareMoverSchedule`, `shareMoverLogging`, `safeMode`, `configValid`, `configError`, `deviceCount`, `flashGuid`, `flashProduct`, `flashVendor`, `regState`, `regTo`, `mdState`, `mdNumDisks`, `mdNumDisabled`, `mdNumInvalid`, `mdNumMissing`, `mdResync`, `mdResyncAction`, `fsState`, `fsProgress`, `fsCopyPrcnt`, `shareCount`, `shareSmbCount`, `shareNfsCount`, `csrfToken`, `maxArraysz`, `maxCachesz` | **YES** - `get_unraid_variables()` | | `vars` | `id`, `version`, `name`, `timeZone`, `comment`, `security`, `workgroup`, `domain`, `useNtp`, `ntpServer1-4`, `useSsl`, `port`, `portssl`, `useTelnet`, `useSsh`, `portssh`, `startPage`, `startArray`, `spindownDelay`, `defaultFormat`, `defaultFsType`, `shutdownTimeout`, `shareDisk`, `shareUser`, `shareSmbEnabled`, `shareNfsEnabled`, `shareAfpEnabled`, `shareCacheEnabled`, `shareMoverSchedule`, `shareMoverLogging`, `safeMode`, `configValid`, `configError`, `deviceCount`, `flashGuid`, `flashProduct`, `flashVendor`, `regState`, `regTo`, `mdState`, `mdNumDisks`, `mdNumDisabled`, `mdNumInvalid`, `mdNumMissing`, `mdResync`, `mdResyncAction`, `fsState`, `fsProgress`, `fsCopyPrcnt`, `shareCount`, `shareSmbCount`, `shareNfsCount`, `csrfToken`, `maxArraysz`, `maxCachesz` | **YES** - `get_unraid_variables()` |
| `online` | `Boolean` | **NO** | | `online` | `Boolean` | **NO** |
| `owner` | Server owner information | **NO** | | `owner` | Server owner information | **NO** |
@@ -378,15 +378,14 @@ GRAPHQL_PUBSUB_CHANNEL {
| `ContainerHostConfig` | JSON host configuration | | | `ContainerHostConfig` | JSON host configuration | |
| `VmDomain` | `uuid/id`, `name`, `state` | Implements Node | | `VmDomain` | `uuid/id`, `name`, `state` | Implements Node |
| `Vms` | `id`, `domain[]` | | | `Vms` | `id`, `domain[]` | |
| `Info` | `time`, `baseboard`, `cpu`, `devices`, `display`, `machineId`, `memory`, `os`, `system`, `versions`, `apps` | Implements Node | | `Info` | `time`, `baseboard`, `cpu`, `devices`, `display`, `machineId`, `memory`, `os`, `system`, `versions` | Implements Node |
| `InfoCpu` | `manufacturer`, `brand`, `vendor`, `family`, `model`, `stepping`, `revision`, `voltage`, `speed`, `speedmin`, `speedmax`, `threads`, `cores`, `processors`, `socket`, `cache`, `flags` | | | `InfoCpu` | `manufacturer`, `brand`, `vendor`, `family`, `model`, `stepping`, `revision`, `voltage`, `speed`, `speedmin`, `speedmax`, `threads`, `cores`, `processors`, `socket`, `cache`, `flags` | |
| `InfoMemory` | `max`, `total`, `free`, `used`, `active`, `available`, `buffcache`, `swaptotal`, `swapused`, `swapfree`, `layout[]` | | | `InfoMemory` | `max`, `total`, `free`, `used`, `active`, `available`, `buffcache`, `swaptotal`, `swapused`, `swapfree`, `layout[]` | |
| `MemoryLayout` | `bank`, `type`, `clockSpeed`, `manufacturer` | Missing `size` field (known bug) | | `MemoryLayout` | `bank`, `type`, `clockSpeed`, `manufacturer` | Missing `size` field (known bug) |
| `Os` | `platform`, `distro`, `release`, `codename`, `kernel`, `arch`, `hostname`, `codepage`, `logofile`, `serial`, `build`, `uptime` | | | `Os` | `platform`, `distro`, `release`, `codename`, `arch`, `hostname`, `logofile`, `serial`, `build`, `uptime` | |
| `Baseboard` | `manufacturer`, `model`, `version`, `serial` | | | `Baseboard` | `manufacturer`, `model`, `version`, `serial` | |
| `SystemInfo` | `manufacturer`, `model`, `version`, `serial`, `uuid` | | | `SystemInfo` | `manufacturer`, `model`, `version`, `serial`, `uuid` | |
| `Versions` | `kernel`, `docker`, `unraid`, `node` | | | `Versions` | `docker`, `unraid`, `node` | |
| `InfoApps` | `installed`, `started` | |
| `Network` | `iface`, `ifaceName`, `ipv4`, `ipv6`, `mac`, `internal`, `operstate`, `type`, `duplex`, `mtu`, `speed`, `carrierChanges`, `id`, `accessUrls[]` | Implements Node | | `Network` | `iface`, `ifaceName`, `ipv4`, `ipv6`, `mac`, `internal`, `operstate`, `type`, `duplex`, `mtu`, `speed`, `carrierChanges`, `id`, `accessUrls[]` | Implements Node |
| `AccessUrl` | `type`, `name`, `ipv4`, `ipv6` | | | `AccessUrl` | `type`, `name`, `ipv4`, `ipv6` | |
| `Share` | `name`, `free`, `used`, `size`, `include[]`, `exclude[]`, `cache`, `nameOrig`, `comment`, `allocator`, `splitLevel`, `floor`, `cow`, `color`, `luksStatus` | | | `Share` | `name`, `free`, `used`, `size`, `include[]`, `exclude[]`, `cache`, `nameOrig`, `comment`, `allocator`, `splitLevel`, `floor`, `cow`, `color`, `luksStatus` | |
@@ -565,7 +564,6 @@ The current MCP server has 10 tools (76 actions) after consolidation. The follow
|--------------|---------------|---------------| |--------------|---------------|---------------|
| `list_ups_devices()` | `upsDevices` query | UPS monitoring | | `list_ups_devices()` | `upsDevices` query | UPS monitoring |
| `get_ups_device(id)` | `upsDeviceById` query | UPS details | | `get_ups_device(id)` | `upsDeviceById` query | UPS details |
| `get_ups_configuration()` | `upsConfiguration` query | UPS config |
| `configure_ups(config)` | `configureUps` mutation | UPS management | | `configure_ups(config)` | `configureUps` mutation | UPS management |
#### System Metrics (0 tools currently, 1 query + 3 subscriptions) #### System Metrics (0 tools currently, 1 query + 3 subscriptions)

View File

@@ -120,7 +120,7 @@ Learn More about the Unraid API
* Linux Kernel 6.12.54-Unraid * Linux Kernel 6.12.54-Unraid
* Samba 4.23.2 * Samba 4.23.2
* Updated versions of openssl, mesa, kernel-firmware, git, exfatprogs, and more * Updated versions of mesa, kernel-firmware, git, exfatprogs, and more
**Plugin Compatibility Notice** **Plugin Compatibility Notice**
------------------------------- -------------------------------

View File

@@ -665,7 +665,6 @@ type Query {
servers: [Server!]! servers: [Server!]!
services: [Service!]! services: [Service!]!
shares: [Share] shares: [Share]
unassignedDevices: [UnassignedDevice]
me: Me me: Me
user(id: ID!): User user(id: ID!): User
users(input: usersInput): [User!]! users(input: usersInput): [User!]!
@@ -743,7 +742,6 @@ type Subscription {
service(name: String!): [Service!] service(name: String!): [Service!]
share(id: ID!): Share! share(id: ID!): Share!
shares: [Share!] shares: [Share!]
unassignedDevices: [UnassignedDevice!]
me: Me me: Me
user(id: ID!): User! user(id: ID!): User!
users: [User]! users: [User]!
@@ -892,7 +890,6 @@ type DockerNetwork {
```graphql ```graphql
type Info implements Node { type Info implements Node {
apps: InfoApps
baseboard: Baseboard baseboard: Baseboard
cpu: InfoCpu cpu: InfoCpu
devices: Devices devices: Devices
@@ -945,10 +942,8 @@ type Os {
distro: String distro: String
release: String release: String
codename: String codename: String
kernel: String
arch: String arch: String
hostname: String hostname: String
codepage: String
logofile: String logofile: String
serial: String serial: String
build: String build: String

View File

@@ -220,13 +220,12 @@ Then access at `http://YOUR_SERVER_IP/graphql` to explore the schema via Apollo
```graphql ```graphql
query { query {
info { info {
os { platform distro release uptime hostname arch kernel } os { platform distro release uptime hostname arch }
cpu { manufacturer brand cores threads } cpu { manufacturer brand cores threads }
memory { layout { bank type clockSpeed manufacturer } } memory { layout { bank type clockSpeed manufacturer } }
baseboard { manufacturer model version serial } baseboard { manufacturer model version serial }
system { manufacturer model version serial uuid } system { manufacturer model version serial uuid }
versions { kernel docker unraid node } versions { docker unraid node }
apps { installed started }
machineId machineId
time time
} }

View File

@@ -468,7 +468,6 @@ type Config implements Node {
type CoreVersions { type CoreVersions {
api: String api: String
kernel: String
unraid: String unraid: String
} }
@@ -478,7 +477,6 @@ type PackageVersions {
nginx: String nginx: String
node: String node: String
npm: String npm: String
openssl: String
php: String php: String
pm2: String pm2: String
} }
@@ -487,33 +485,6 @@ type InfoVersions implements Node {
id: PrefixedID! id: PrefixedID!
core: CoreVersions! core: CoreVersions!
packages: PackageVersions packages: PackageVersions
# Flattened fields used by the MCP tool queries (may exist in live API)
kernel: String
openssl: String
systemOpenssl: String
systemOpensslLib: String
node: String
v8: String
npm: String
yarn: String
pm2: String
gulp: String
grunt: String
git: String
tsc: String
mysql: String
redis: String
mongodb: String
apache: String
nginx: String
php: String
docker: String
postfix: String
postgresql: String
perl: String
python: String
gcc: String
unraid: String
} }
type InfoOs implements Node { type InfoOs implements Node {
@@ -522,7 +493,6 @@ type InfoOs implements Node {
distro: String distro: String
release: String release: String
codename: String codename: String
kernel: String
arch: String arch: String
hostname: String hostname: String
logofile: String logofile: String
@@ -532,7 +502,6 @@ type InfoOs implements Node {
fqdn: String fqdn: String
servicepack: String servicepack: String
uefi: Boolean uefi: Boolean
codepage: String
} }
type InfoCpu implements Node { type InfoCpu implements Node {
@@ -714,11 +683,6 @@ type InfoDisplay implements Node {
wwn: Boolean! wwn: Boolean!
} }
type Apps {
installed: Int
started: Int
}
type Info implements Node { type Info implements Node {
id: PrefixedID! id: PrefixedID!
os: InfoOs! os: InfoOs!
@@ -729,13 +693,13 @@ type Info implements Node {
versions: InfoVersions! versions: InfoVersions!
devices: InfoDevices! devices: InfoDevices!
display: InfoDisplay! display: InfoDisplay!
apps: Apps
machineId: ID machineId: ID
time: DateTime! time: DateTime!
} }
type MetricsCpu { type MetricsCpu {
used: Float percentTotal: Float!
cpus: [CPULoad!]!
} }
type MetricsMemory { type MetricsMemory {
@@ -752,7 +716,6 @@ type Metrics implements Node {
type Service implements Node { type Service implements Node {
id: PrefixedID! id: PrefixedID!
name: String name: String
state: String
online: Boolean online: Boolean
uptime: Uptime uptime: Uptime
version: String version: String
@@ -788,12 +751,6 @@ type Registration implements Node {
updateExpiration: String updateExpiration: String
} }
type ConnectSettings {
status: String
sandbox: Boolean
flashGuid: String
}
type Owner { type Owner {
username: String! username: String!
avatar: String! avatar: String!
@@ -1171,7 +1128,6 @@ type ApiKey implements Node {
permissions: JSON permissions: JSON
createdAt: String! createdAt: String!
description: String description: String
lastUsed: String
} }
type ApiKeyMutations { type ApiKeyMutations {
@@ -1362,9 +1318,6 @@ type Query {
# Network (used by MCP tool) # Network (used by MCP tool)
network: Network network: Network
# Connect (used by MCP tool)
connect: ConnectSettings
} }
# ============================================================================ # ============================================================================

View File

@@ -1900,9 +1900,6 @@ type InfoOs implements Node {
"""OS codename""" """OS codename"""
codename: String codename: String
"""Kernel version"""
kernel: String
"""OS architecture""" """OS architecture"""
arch: String arch: String
@@ -1987,15 +1984,9 @@ type CoreVersions {
"""Unraid API version""" """Unraid API version"""
api: String api: String
"""Kernel version"""
kernel: String
} }
type PackageVersions { type PackageVersions {
"""OpenSSL version"""
openssl: String
"""Node.js version""" """Node.js version"""
node: String node: String

View File

@@ -1900,9 +1900,6 @@ type InfoOs implements Node {
"""OS codename""" """OS codename"""
codename: String codename: String
"""Kernel version"""
kernel: String
"""OS architecture""" """OS architecture"""
arch: String arch: String
@@ -1987,15 +1984,9 @@ type CoreVersions {
"""Unraid API version""" """Unraid API version"""
api: String api: String
"""Kernel version"""
kernel: String
} }
type PackageVersions { type PackageVersions {
"""OpenSSL version"""
openssl: String
"""Node.js version""" """Node.js version"""
node: String node: String

View File

@@ -783,17 +783,6 @@ class TestStorageToolRequests:
with pytest.raises(ToolError, match="log_path must start with"): with pytest.raises(ToolError, match="log_path must start with"):
await tool(action="logs", log_path="/etc/shadow") await tool(action="logs", log_path="/etc/shadow")
@respx.mock
async def test_unassigned_sends_correct_query(self) -> None:
route = respx.post(API_URL).mock(
return_value=_graphql_response({"unassignedDevices": []})
)
tool = self._get_tool()
result = await tool(action="unassigned")
body = _extract_request_body(route.calls.last.request)
assert "GetUnassignedDevices" in body["query"]
assert "devices" in result
# =========================================================================== # ===========================================================================
# Section 10: Notifications tool request construction # Section 10: Notifications tool request construction

View File

@@ -142,12 +142,6 @@ class TestInfoQueries:
errors = _validate_operation(schema, QUERIES["ups_device"]) errors = _validate_operation(schema, QUERIES["ups_device"])
assert not errors, f"ups_device query validation failed: {errors}" assert not errors, f"ups_device query validation failed: {errors}"
def test_ups_config_query(self, schema: GraphQLSchema) -> None:
from unraid_mcp.tools.info import QUERIES
errors = _validate_operation(schema, QUERIES["ups_config"])
assert not errors, f"ups_config query validation failed: {errors}"
def test_all_info_actions_covered(self, schema: GraphQLSchema) -> None: def test_all_info_actions_covered(self, schema: GraphQLSchema) -> None:
"""Ensure every key in QUERIES has a corresponding test.""" """Ensure every key in QUERIES has a corresponding test."""
from unraid_mcp.tools.info import QUERIES from unraid_mcp.tools.info import QUERIES
@@ -156,7 +150,7 @@ class TestInfoQueries:
"overview", "array", "network", "registration", "connect", "overview", "array", "network", "registration", "connect",
"variables", "metrics", "services", "display", "config", "variables", "metrics", "services", "display", "config",
"online", "owner", "settings", "server", "servers", "online", "owner", "settings", "server", "servers",
"flash", "ups_devices", "ups_device", "ups_config", "flash", "ups_devices", "ups_device",
} }
assert set(QUERIES.keys()) == expected_actions assert set(QUERIES.keys()) == expected_actions
@@ -237,12 +231,6 @@ class TestStorageQueries:
errors = _validate_operation(schema, QUERIES["disk_details"]) errors = _validate_operation(schema, QUERIES["disk_details"])
assert not errors, f"disk_details query validation failed: {errors}" assert not errors, f"disk_details query validation failed: {errors}"
def test_unassigned_query(self, schema: GraphQLSchema) -> None:
from unraid_mcp.tools.storage import QUERIES
errors = _validate_operation(schema, QUERIES["unassigned"])
assert not errors, f"unassigned query validation failed: {errors}"
def test_log_files_query(self, schema: GraphQLSchema) -> None: def test_log_files_query(self, schema: GraphQLSchema) -> None:
from unraid_mcp.tools.storage import QUERIES from unraid_mcp.tools.storage import QUERIES
@@ -258,7 +246,7 @@ class TestStorageQueries:
def test_all_storage_queries_covered(self, schema: GraphQLSchema) -> None: def test_all_storage_queries_covered(self, schema: GraphQLSchema) -> None:
from unraid_mcp.tools.storage import QUERIES from unraid_mcp.tools.storage import QUERIES
expected = {"shares", "disks", "disk_details", "unassigned", "log_files", "logs"} expected = {"shares", "disks", "disk_details", "log_files", "logs"}
assert set(QUERIES.keys()) == expected assert set(QUERIES.keys()) == expected

View File

@@ -154,12 +154,6 @@ class TestStorageActions:
with pytest.raises(ToolError, match="not found"): with pytest.raises(ToolError, match="not found"):
await tool_fn(action="disk_details", disk_id="d:missing") await tool_fn(action="disk_details", disk_id="d:missing")
async def test_unassigned(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {"unassignedDevices": []}
tool_fn = _make_tool()
result = await tool_fn(action="unassigned")
assert result["devices"] == []
async def test_log_files(self, _mock_graphql: AsyncMock) -> None: async def test_log_files(self, _mock_graphql: AsyncMock) -> None:
_mock_graphql.return_value = {"logFiles": [{"name": "syslog", "path": "/var/log/syslog"}]} _mock_graphql.return_value = {"logFiles": [{"name": "syslog", "path": "/var/log/syslog"}]}
tool_fn = _make_tool() tool_fn = _make_tool()

View File

@@ -18,7 +18,6 @@ from .config.settings import (
VERSION, VERSION,
) )
from .subscriptions.resources import register_subscription_resources from .subscriptions.resources import register_subscription_resources
from .tools.array import register_array_tool
from .tools.docker import register_docker_tool from .tools.docker import register_docker_tool
from .tools.health import register_health_tool from .tools.health import register_health_tool
from .tools.info import register_info_tool from .tools.info import register_info_tool
@@ -51,7 +50,6 @@ def register_all_modules() -> None:
# Register all consolidated tools # Register all consolidated tools
registrars = [ registrars = [
register_info_tool, register_info_tool,
register_array_tool,
register_storage_tool, register_storage_tool,
register_docker_tool, register_docker_tool,
register_vm_tool, register_vm_tool,

View File

@@ -2,7 +2,6 @@
9 consolidated tools with ~69 actions total: 9 consolidated tools with ~69 actions total:
unraid_info - System information queries (19 actions) unraid_info - System information queries (19 actions)
unraid_array - Array operations and power management (12 actions)
unraid_storage - Storage, disks, and logs (6 actions) unraid_storage - Storage, disks, and logs (6 actions)
unraid_docker - Docker container management (15 actions) unraid_docker - Docker container management (15 actions)
unraid_vm - Virtual machine management (9 actions) unraid_vm - Virtual machine management (9 actions)

View File

@@ -1,104 +0,0 @@
"""Array parity check operations.
Provides the `unraid_array` tool with 5 actions for parity check management.
"""
from typing import Any, Literal
from fastmcp import FastMCP
from ..config.logging import logger
from ..core.client import make_graphql_request
from ..core.exceptions import ToolError
# GraphQL read-only operations, keyed by tool action name.
QUERIES: dict[str, str] = {
    "parity_status": """
    query GetParityStatus {
      array { parityCheckStatus { progress speed errors } }
    }
    """,
}

# GraphQL mutations, keyed by tool action name. Only parity_start takes a
# variable ($correct: write corrections instead of just reporting errors).
MUTATIONS: dict[str, str] = {
    "parity_start": """
    mutation StartParityCheck($correct: Boolean) {
      parityCheck { start(correct: $correct) }
    }
    """,
    "parity_pause": """
    mutation PauseParityCheck {
      parityCheck { pause }
    }
    """,
    "parity_resume": """
    mutation ResumeParityCheck {
      parityCheck { resume }
    }
    """,
    "parity_cancel": """
    mutation CancelParityCheck {
      parityCheck { cancel }
    }
    """,
}

# Every action the tool accepts; used for runtime validation since Literal
# typing is not enforced at runtime.
ALL_ACTIONS = set(QUERIES) | set(MUTATIONS)

# Static action type exposed in the tool signature for schema generation.
ARRAY_ACTIONS = Literal[
    "parity_start",
    "parity_pause",
    "parity_resume",
    "parity_cancel",
    "parity_status",
]
def register_array_tool(mcp: FastMCP) -> None:
    """Register the unraid_array tool with the FastMCP instance."""

    @mcp.tool()
    async def unraid_array(
        action: ARRAY_ACTIONS,
        correct: bool | None = None,
    ) -> dict[str, Any]:
        """Manage Unraid array parity checks.

        Actions:
            parity_start  - Start parity check (optional correct=True to fix errors)
            parity_pause  - Pause running parity check
            parity_resume - Resume paused parity check
            parity_cancel - Cancel running parity check
            parity_status - Get current parity check status

        Args:
            action: One of the five parity actions above.
            correct: Only meaningful for parity_start; when True the check
                writes corrections rather than only reporting errors.

        Returns:
            Dict with "success", the echoed "action", and the GraphQL "data".

        Raises:
            ToolError: On an unknown action or any underlying request failure.
        """
        # Literal typing is not enforced at runtime, so validate explicitly.
        if action not in ALL_ACTIONS:
            raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}")

        try:
            logger.info(f"Executing unraid_array action={action}")

            if action in QUERIES:
                data = await make_graphql_request(QUERIES[action])
                return {"success": True, "action": action, "data": data}

            query = MUTATIONS[action]
            variables: dict[str, Any] | None = None
            # Only send the variable when explicitly provided so the server's
            # default behavior applies otherwise.
            if action == "parity_start" and correct is not None:
                variables = {"correct": correct}
            data = await make_graphql_request(query, variables)
            return {
                "success": True,
                "action": action,
                "data": data,
            }
        except ToolError:
            # Re-raise our own errors untouched; only wrap unexpected ones.
            raise
        except Exception as e:
            logger.error(f"Error in unraid_array action={action}: {e}", exc_info=True)
            raise ToolError(f"Failed to execute array/{action}: {e!s}") from e

    logger.info("Array tool registered successfully")

View File

@@ -103,7 +103,6 @@ async def _comprehensive_check() -> dict[str, Any]:
query ComprehensiveHealthCheck { query ComprehensiveHealthCheck {
info { info {
machineId time machineId time
versions { unraid }
os { uptime } os { uptime }
} }
array { state } array { state }

View File

@@ -18,15 +18,13 @@ QUERIES: dict[str, str] = {
"overview": """ "overview": """
query GetSystemInfo { query GetSystemInfo {
info { info {
os { platform distro release codename kernel arch hostname codepage logofile serial build uptime } os { platform distro release codename arch hostname logofile serial build uptime }
cpu { manufacturer brand vendor family model stepping revision voltage speed speedmin speedmax threads cores processors socket cache flags } cpu { manufacturer brand vendor family model stepping revision voltage speed speedmin speedmax threads cores processors socket cache flags }
memory { memory {
layout { bank type clockSpeed formFactor manufacturer partNum serialNum } layout { bank type clockSpeed formFactor manufacturer partNum serialNum }
} }
baseboard { manufacturer model version serial assetTag } baseboard { manufacturer model version serial assetTag }
system { manufacturer model version serial uuid sku } system { manufacturer model version serial uuid sku }
versions { kernel openssl systemOpenssl systemOpensslLib node v8 npm yarn pm2 gulp grunt git tsc mysql redis mongodb apache nginx php docker postfix postgresql perl python gcc unraid }
apps { installed started }
machineId machineId
time time
} }
@@ -65,11 +63,6 @@ QUERIES: dict[str, str] = {
} }
} }
""", """,
"connect": """
query GetConnectSettings {
connect { status sandbox flashGuid }
}
""",
"variables": """ "variables": """
query GetSelectiveUnraidVariables { query GetSelectiveUnraidVariables {
vars { vars {
@@ -87,12 +80,12 @@ QUERIES: dict[str, str] = {
""", """,
"metrics": """ "metrics": """
query GetMetrics { query GetMetrics {
metrics { cpu { used } memory { used total } } metrics { cpu { percentTotal cpus { percentTotal } } memory { used total } }
} }
""", """,
"services": """ "services": """
query GetServices { query GetServices {
services { name state } services { name online uptime { timestamp } }
} }
""", """,
"display": """ "display": """
@@ -110,7 +103,7 @@ QUERIES: dict[str, str] = {
""", """,
"owner": """ "owner": """
query GetOwner { query GetOwner {
owner { username avatar url } owner { username avatar }
} }
""", """,
"settings": """ "settings": """
@@ -122,7 +115,6 @@ QUERIES: dict[str, str] = {
query GetServer { query GetServer {
info { info {
os { hostname uptime } os { hostname uptime }
versions { unraid }
machineId time machineId time
} }
array { state } array { state }
@@ -131,27 +123,22 @@ QUERIES: dict[str, str] = {
""", """,
"servers": """ "servers": """
query GetServers { query GetServers {
servers { id name status description ip port } servers { id name status lanip wanip }
} }
""", """,
"flash": """ "flash": """
query GetFlash { query GetFlash {
flash { id guid product vendor size } flash { id guid product vendor }
} }
""", """,
"ups_devices": """ "ups_devices": """
query GetUpsDevices { query GetUpsDevices {
upsDevices { id model status runtime charge load } upsDevices { id model status name battery { chargeLevel estimatedRuntime health } }
} }
""", """,
"ups_device": """ "ups_device": """
query GetUpsDevice($id: PrefixedID!) { query GetUpsDevice($id: PrefixedID!) {
upsDeviceById(id: $id) { id model status runtime charge load voltage frequency temperature } upsDeviceById(id: $id) { id model status name battery { chargeLevel estimatedRuntime health } power {loadPercentage inputVoltage outputVoltage } }
}
""",
"ups_config": """
query GetUpsConfig {
upsConfiguration { enabled mode cable driver port }
} }
""", """,
} }
@@ -161,7 +148,6 @@ INFO_ACTIONS = Literal[
"array", "array",
"network", "network",
"registration", "registration",
"connect",
"variables", "variables",
"metrics", "metrics",
"services", "services",
@@ -175,7 +161,6 @@ INFO_ACTIONS = Literal[
"flash", "flash",
"ups_devices", "ups_devices",
"ups_device", "ups_device",
"ups_config",
] ]
assert set(QUERIES.keys()) == set(INFO_ACTIONS.__args__), ( assert set(QUERIES.keys()) == set(INFO_ACTIONS.__args__), (
@@ -329,7 +314,6 @@ def register_info_tool(mcp: FastMCP) -> None:
array - Array state, capacity, disk health array - Array state, capacity, disk health
network - Access URLs, interfaces network - Access URLs, interfaces
registration - License type, state, expiration registration - License type, state, expiration
connect - Unraid Connect settings
variables - System variables and configuration variables - System variables and configuration
metrics - CPU and memory utilization metrics - CPU and memory utilization
services - Running services services - Running services
@@ -343,7 +327,6 @@ def register_info_tool(mcp: FastMCP) -> None:
flash - Flash drive info flash - Flash drive info
ups_devices - List UPS devices ups_devices - List UPS devices
ups_device - Single UPS device (requires device_id) ups_device - Single UPS device (requires device_id)
ups_config - UPS configuration
""" """
if action not in QUERIES: if action not in QUERIES:
raise ToolError(f"Invalid action '{action}'. Must be one of: {list(QUERIES.keys())}") raise ToolError(f"Invalid action '{action}'. Must be one of: {list(QUERIES.keys())}")
@@ -361,14 +344,12 @@ def register_info_tool(mcp: FastMCP) -> None:
dict_actions: dict[str, str] = { dict_actions: dict[str, str] = {
"network": "network", "network": "network",
"registration": "registration", "registration": "registration",
"connect": "connect",
"variables": "vars", "variables": "vars",
"metrics": "metrics", "metrics": "metrics",
"config": "config", "config": "config",
"owner": "owner", "owner": "owner",
"flash": "flash", "flash": "flash",
"ups_device": "upsDeviceById", "ups_device": "upsDeviceById",
"ups_config": "upsConfiguration",
} }
# List-wrapped actions: action -> (GraphQL response key, output key) # List-wrapped actions: action -> (GraphQL response key, output key)
list_actions: dict[str, tuple[str, str]] = { list_actions: dict[str, tuple[str, str]] = {

View File

@@ -16,12 +16,12 @@ from ..core.exceptions import ToolError
QUERIES: dict[str, str] = { QUERIES: dict[str, str] = {
"list": """ "list": """
query ListApiKeys { query ListApiKeys {
apiKeys { id name roles permissions createdAt lastUsed } apiKeys { id name roles permissions { resource actions } createdAt }
} }
""", """,
"get": """ "get": """
query GetApiKey($id: PrefixedID!) { query GetApiKey($id: PrefixedID!) {
apiKey(id: $id) { id name roles permissions createdAt lastUsed } apiKey(id: $id) { id name roles permissions { resource actions } createdAt }
} }
""", """,
} }

View File

@@ -1,7 +1,6 @@
"""Storage and disk management. """Storage and disk management.
Provides the `unraid_storage` tool with 6 actions for shares, physical disks, Provides the `unraid_storage` tool with 6 actions for shares, physical disks, log files, and log content retrieval.
unassigned devices, log files, and log content retrieval.
""" """
from typing import Any, Literal from typing import Any, Literal
@@ -37,11 +36,6 @@ QUERIES: dict[str, str] = {
} }
} }
""", """,
"unassigned": """
query GetUnassignedDevices {
unassignedDevices { id device name size type }
}
""",
"log_files": """ "log_files": """
query ListLogFiles { query ListLogFiles {
logFiles { name path size modifiedAt } logFiles { name path size modifiedAt }
@@ -60,7 +54,6 @@ STORAGE_ACTIONS = Literal[
"shares", "shares",
"disks", "disks",
"disk_details", "disk_details",
"unassigned",
"log_files", "log_files",
"logs", "logs",
] ]
@@ -97,7 +90,6 @@ def register_storage_tool(mcp: FastMCP) -> None:
shares - List all user shares with capacity info shares - List all user shares with capacity info
disks - List all physical disks disks - List all physical disks
disk_details - Detailed SMART info for a disk (requires disk_id) disk_details - Detailed SMART info for a disk (requires disk_id)
unassigned - List unassigned devices
log_files - List available log files log_files - List available log files
logs - Retrieve log content (requires log_path, optional tail_lines) logs - Retrieve log content (requires log_path, optional tail_lines)
""" """
@@ -158,10 +150,6 @@ def register_storage_tool(mcp: FastMCP) -> None:
} }
return {"summary": summary, "details": raw} return {"summary": summary, "details": raw}
if action == "unassigned":
devices = data.get("unassignedDevices", [])
return {"devices": list(devices) if isinstance(devices, list) else []}
if action == "log_files": if action == "log_files":
files = data.get("logFiles", []) files = data.get("logFiles", [])
return {"log_files": list(files) if isinstance(files, list) else []} return {"log_files": list(files) if isinstance(files, list) else []}