From 60defc35ca8eb4a997561a7521563348e19b7b4b Mon Sep 17 00:00:00 2001 From: Jacob Magar Date: Fri, 13 Mar 2026 01:54:55 -0400 Subject: [PATCH] feat: add 5 notification mutations + comprehensive refactors from PR review New notification actions (archive_many, create_unique, unarchive_many, unarchive_all, recalculate) bring unraid_notifications to 14 actions. Also includes continuation of CodeRabbit/PR review fixes: - Remove redundant try-except in virtualization.py (silent failure fix) - Add QueryCache protocol with get/put/invalidate_all to core/client.py - Refactor subscriptions (manager, diagnostics, resources, utils) - Update config (logging, settings) for improved structure - Expand test coverage: http_layer, safety guards, schema validation - Minor cleanups: array, docker, health, keys tools Co-authored-by: Claude --- docker-compose.yml | 1 + docs/research/feature-gap-analysis.md | 6 +- docs/research/unraid-api-crawl.md | 17 + .../2026-03-13-graphql-mutations-expansion.md | 1742 +++++++++++++++++ pyproject.toml | 2 +- tests/http_layer/test_request_construction.py | 381 ++-- tests/safety/test_destructive_guards.py | 36 +- tests/schema/test_query_validation.py | 49 +- tests/test_array.py | 3 + tests/test_notifications.py | 140 +- unraid_mcp/config/logging.py | 50 +- unraid_mcp/config/settings.py | 29 +- unraid_mcp/core/client.py | 56 +- unraid_mcp/main.py | 9 +- unraid_mcp/server.py | 18 +- unraid_mcp/subscriptions/diagnostics.py | 27 +- unraid_mcp/subscriptions/manager.py | 40 +- unraid_mcp/subscriptions/resources.py | 23 +- unraid_mcp/subscriptions/utils.py | 35 + unraid_mcp/tools/__init__.py | 6 +- unraid_mcp/tools/array.py | 2 +- unraid_mcp/tools/docker.py | 4 +- unraid_mcp/tools/health.py | 37 +- unraid_mcp/tools/keys.py | 24 +- unraid_mcp/tools/notifications.py | 118 +- unraid_mcp/tools/virtualization.py | 74 +- uv.lock | 2 +- 27 files changed, 2508 insertions(+), 423 deletions(-) create mode 100644 
docs/superpowers/plans/2026-03-13-graphql-mutations-expansion.md diff --git a/docker-compose.yml b/docker-compose.yml index db6a9cb..0f305de 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,6 +11,7 @@ services: tmpfs: - /tmp:noexec,nosuid,size=64m - /app/logs:noexec,nosuid,size=16m + - /app/.cache/logs:noexec,nosuid,size=8m ports: # HostPort:ContainerPort (maps to UNRAID_MCP_PORT inside the container, default 6970) # Change the host port (left side) if 6970 is already in use on your host diff --git a/docs/research/feature-gap-analysis.md b/docs/research/feature-gap-analysis.md index 588354f..a65fbeb 100644 --- a/docs/research/feature-gap-analysis.md +++ b/docs/research/feature-gap-analysis.md @@ -420,8 +420,8 @@ GRAPHQL_PUBSUB_CHANNEL { | `CreateApiKeyInput` | `apiKey.create` | `name!`, `description`, `roles[]`, `permissions[]`, `overwrite` | | `AddPermissionInput` | `addPermission` | `resource!`, `actions![]` | | `AddRoleForUserInput` | `addRoleForUser` | User + role assignment | -| `AddRoleForApiKeyInput` | `apiKey.addRole` | API key + role assignment | -| `RemoveRoleFromApiKeyInput` | `apiKey.removeRole` | API key + role removal | +| `AddRoleForApiKeyInput` | `addRoleForApiKey` | API key + role assignment | +| `RemoveRoleFromApiKeyInput` | `removeRoleFromApiKey` | API key + role removal | | `arrayDiskInput` | `addDiskToArray`, `removeDiskFromArray` | Disk assignment data | | `ConnectSignInInput` | `connectSignIn` | Connect credentials | | `EnableDynamicRemoteAccessInput` | `enableDynamicRemoteAccess` | Remote access config | @@ -619,7 +619,7 @@ The current MCP server has 10 tools (76 actions) after consolidation. 
The follow |--------------|---------------|---------------| | `list_api_keys()` | `apiKeys` query | Key inventory | | `get_api_key(id)` | `apiKey(id)` query | Key details | -| `create_api_key(input)` | `apiKey.create` mutation | Key provisioning | +| `create_api_key(input)` | `apiKey.create` mutation | Key provisioning — **already implemented** in `unraid_keys` | | `delete_api_keys(input)` | `apiKey.delete` mutation | Key cleanup | | `update_api_key(input)` | `apiKey.update` mutation | Key modification | diff --git a/docs/research/unraid-api-crawl.md b/docs/research/unraid-api-crawl.md index e854afa..dbcd209 100644 --- a/docs/research/unraid-api-crawl.md +++ b/docs/research/unraid-api-crawl.md @@ -713,6 +713,23 @@ type Mutation { addUser(input: addUserInput!): User deleteUser(input: deleteUserInput!): User } + +type ApiKeyMutations { + """Create an API key""" + create(input: CreateApiKeyInput!): ApiKey! + + """Add a role to an API key""" + addRole(input: AddRoleForApiKeyInput!): Boolean! + + """Remove a role from an API key""" + removeRole(input: RemoveRoleFromApiKeyInput!): Boolean! + + """Delete one or more API keys""" + delete(input: DeleteApiKeyInput!): Boolean! + + """Update an API key""" + update(input: UpdateApiKeyInput!): ApiKey! +} ``` > **Note:** The client schema above uses `ID!` for disk mutation args (e.g., `mountArrayDisk(id: ID!)`), but the actual server resolvers use `PrefixedID!`. The MCP tool code correctly uses `PrefixedID!` based on server source analysis. 
diff --git a/docs/superpowers/plans/2026-03-13-graphql-mutations-expansion.md b/docs/superpowers/plans/2026-03-13-graphql-mutations-expansion.md new file mode 100644 index 0000000..faf9394 --- /dev/null +++ b/docs/superpowers/plans/2026-03-13-graphql-mutations-expansion.md @@ -0,0 +1,1742 @@ +# GraphQL Mutations Expansion Implementation Plan + +> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Implement all 28 missing GraphQL mutations across 11 files, bringing the server from 76 to 104 actions. + +**Architecture:** Follows the existing consolidated action pattern — QUERIES/MUTATIONS dicts, ALL_ACTIONS set, Literal type with sync-guard, and handler branches inside the registered tool function. Four existing tool files gain new mutations; one new tool file (`tools/settings.py`) is created for the 9 settings/connect mutations; `server.py` gets one new import and registration.
+ +**Tech Stack:** Python 3.12+, FastMCP, httpx, pytest, uv + +--- + +## File Structure + +| File | Change | New actions | +|---|---|---| +| `unraid_mcp/tools/notifications.py` | Modify | +5 mutations | +| `unraid_mcp/tools/storage.py` | Modify | +1 mutation | +| `unraid_mcp/tools/info.py` | Modify | +2 mutations | +| `unraid_mcp/tools/docker.py` | Modify | +11 mutations | +| `unraid_mcp/tools/settings.py` | **Create** | 9 mutations | +| `unraid_mcp/server.py` | Modify | register settings tool | +| `tests/test_notifications.py` | Modify | tests for 5 new actions | +| `tests/test_storage.py` | Modify | tests for flash_backup | +| `tests/test_info.py` | Modify | tests for update_server, update_ssh | +| `tests/test_docker.py` | Modify | tests for 11 organizer actions | +| `tests/test_settings.py` | **Create** | tests for all 9 settings actions | + +--- + +## Chunk 1: Notifications — 5 new mutations + +### Task 1: Test the 5 new notification mutations (RED) + +**Files:** +- Modify: `tests/test_notifications.py` + +- [ ] **Step 1: Append the new test class to `tests/test_notifications.py`** + +```python +class TestNewNotificationMutations: + async def test_archive_many_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "archiveNotifications": { + "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + "archive": {"info": 2, "warning": 0, "alert": 0, "total": 2}, + } + } + tool_fn = _make_tool() + result = await tool_fn(action="archive_many", notification_ids=["n:1", "n:2"]) + assert result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1] == {"ids": ["n:1", "n:2"]} + + async def test_archive_many_requires_ids(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="notification_ids"): + await tool_fn(action="archive_many") + + async def test_create_unique_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + 
"notifyIfUnique": {"id": "n:1", "title": "Test", "importance": "INFO"} + } + tool_fn = _make_tool() + result = await tool_fn( + action="create_unique", + title="Test", + subject="Subj", + description="Desc", + importance="info", + ) + assert result["success"] is True + + async def test_create_unique_returns_none_when_duplicate( + self, _mock_graphql: AsyncMock + ) -> None: + _mock_graphql.return_value = {"notifyIfUnique": None} + tool_fn = _make_tool() + result = await tool_fn( + action="create_unique", + title="T", + subject="S", + description="D", + importance="info", + ) + assert result["success"] is True + assert result["duplicate"] is True + + async def test_create_unique_requires_fields(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="requires title"): + await tool_fn(action="create_unique") + + async def test_unarchive_many_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "unarchiveNotifications": { + "unread": {"info": 2, "warning": 0, "alert": 0, "total": 2}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + } + } + tool_fn = _make_tool() + result = await tool_fn(action="unarchive_many", notification_ids=["n:1", "n:2"]) + assert result["success"] is True + + async def test_unarchive_many_requires_ids(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="notification_ids"): + await tool_fn(action="unarchive_many") + + async def test_unarchive_all_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "unarchiveAll": { + "unread": {"info": 5, "warning": 1, "alert": 0, "total": 6}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + } + } + tool_fn = _make_tool() + result = await tool_fn(action="unarchive_all") + assert result["success"] is True + + async def test_unarchive_all_with_importance(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value 
= {"unarchiveAll": {"unread": {"total": 1}, "archive": {"total": 0}}} + tool_fn = _make_tool() + await tool_fn(action="unarchive_all", importance="WARNING") + call_args = _mock_graphql.call_args + assert call_args[0][1] == {"importance": "WARNING"} + + async def test_recalculate_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "recalculateOverview": { + "unread": {"info": 3, "warning": 1, "alert": 0, "total": 4}, + "archive": {"info": 10, "warning": 0, "alert": 0, "total": 10}, + } + } + tool_fn = _make_tool() + result = await tool_fn(action="recalculate") + assert result["success"] is True +``` + +- [ ] **Step 2: Run tests to confirm they fail** + +```bash +cd /home/jmagar/workspace/unraid-mcp +uv run pytest tests/test_notifications.py::TestNewNotificationMutations -v 2>&1 | tail -20 +``` + +Expected: All 10 tests FAIL with `ToolError: Invalid action` or collection errors. + +--- + +### Task 2: Implement the 5 new notification mutations (GREEN) + +**Files:** +- Modify: `unraid_mcp/tools/notifications.py` + +- [ ] **Step 3: Add 5 entries to the MUTATIONS dict** (insert after the existing `"archive_all"` entry, before the closing `}`) + +```python + "archive_many": """ + mutation ArchiveNotifications($ids: [PrefixedID!]!) { + archiveNotifications(ids: $ids) { + unread { info warning alert total } + archive { info warning alert total } + } + } + """, + "create_unique": """ + mutation NotifyIfUnique($input: NotificationData!) { + notifyIfUnique(input: $input) { id title importance } + } + """, + "unarchive_many": """ + mutation UnarchiveNotifications($ids: [PrefixedID!]!) 
{ + unarchiveNotifications(ids: $ids) { + unread { info warning alert total } + archive { info warning alert total } + } + } + """, + "unarchive_all": """ + mutation UnarchiveAll($importance: NotificationImportance) { + unarchiveAll(importance: $importance) { + unread { info warning alert total } + archive { info warning alert total } + } + } + """, + "recalculate": """ + mutation RecalculateOverview { + recalculateOverview { + unread { info warning alert total } + archive { info warning alert total } + } + } + """, +``` + +- [ ] **Step 4: Update the NOTIFICATION_ACTIONS Literal and add `notification_ids` param** + +Replace the `NOTIFICATION_ACTIONS` Literal: + +```python +NOTIFICATION_ACTIONS = Literal[ + "overview", + "list", + "warnings", + "create", + "archive", + "unread", + "delete", + "delete_archived", + "archive_all", + "archive_many", + "create_unique", + "unarchive_many", + "unarchive_all", + "recalculate", +] +``` + +Add `notification_ids` to the tool function signature (after `description`): + +```python + notification_ids: list[str] | None = None, +``` + +Update the docstring to document the 5 new actions: + +``` + archive_many - Archive multiple notifications by ID (requires notification_ids) + create_unique - Create notification only if no equivalent unread exists (requires title, subject, description, importance) + unarchive_many - Move notifications back to unread (requires notification_ids) + unarchive_all - Move all archived notifications to unread (optional importance filter) + recalculate - Recompute overview counts from disk +``` + +- [ ] **Step 5: Add handler branches** (insert before the final `raise ToolError(...)` line) + +```python + if action == "archive_many": + if not notification_ids: + raise ToolError("notification_ids is required for 'archive_many' action") + data = await make_graphql_request(MUTATIONS["archive_many"], {"ids": notification_ids}) + return {"success": True, "action": "archive_many", "data": data} + + if action == 
"create_unique": + if title is None or subject is None or description is None or importance is None: + raise ToolError("create_unique requires title, subject, description, and importance") + if importance.upper() not in _VALID_IMPORTANCE: + raise ToolError( + f"importance must be one of: {', '.join(sorted(_VALID_IMPORTANCE))}. " + f"Got: '{importance}'" + ) + input_data = { + "title": title, + "subject": subject, + "description": description, + "importance": importance.upper(), + } + data = await make_graphql_request(MUTATIONS["create_unique"], {"input": input_data}) + notification = data.get("notifyIfUnique") + if notification is None: + return {"success": True, "duplicate": True, "data": None} + return {"success": True, "duplicate": False, "data": notification} + + if action == "unarchive_many": + if not notification_ids: + raise ToolError("notification_ids is required for 'unarchive_many' action") + data = await make_graphql_request(MUTATIONS["unarchive_many"], {"ids": notification_ids}) + return {"success": True, "action": "unarchive_many", "data": data} + + if action == "unarchive_all": + vars_: dict[str, Any] | None = None + if importance: + vars_ = {"importance": importance.upper()} + data = await make_graphql_request(MUTATIONS["unarchive_all"], vars_) + return {"success": True, "action": "unarchive_all", "data": data} + + if action == "recalculate": + data = await make_graphql_request(MUTATIONS["recalculate"]) + return {"success": True, "action": "recalculate", "data": data} +``` + +- [ ] **Step 6: Run tests — all must pass** + +```bash +uv run pytest tests/test_notifications.py -v 2>&1 | tail -30 +``` + +Expected: All tests PASS (original 24 + 10 new = 34 total). + +- [ ] **Step 7: Lint** + +```bash +uv run ruff check unraid_mcp/tools/notifications.py && uv run ruff format --check unraid_mcp/tools/notifications.py +``` + +Expected: No errors. 
+ +- [ ] **Step 8: Commit** + +```bash +git add unraid_mcp/tools/notifications.py tests/test_notifications.py +git commit -m "feat: add 5 missing notification mutations (archive_many, create_unique, unarchive_many, unarchive_all, recalculate)" +``` + +--- + +## Chunk 2: Storage — flash_backup + Info — update_server/update_ssh + +### Task 3: Test flash_backup mutation (RED) + +**Files:** +- Modify: `tests/test_storage.py` + +- [ ] **Step 1: Append new test class to `tests/test_storage.py`** + +```python +class TestStorageFlashBackup: + async def test_flash_backup_requires_confirm(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="destructive"): + await tool_fn( + action="flash_backup", + remote_name="myremote", + source_path="/boot", + destination_path="/backups/flash", + ) + + async def test_flash_backup_requires_remote_name(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="remote_name"): + await tool_fn(action="flash_backup", confirm=True) + + async def test_flash_backup_requires_source_path(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="source_path"): + await tool_fn(action="flash_backup", confirm=True, remote_name="r") + + async def test_flash_backup_requires_destination_path(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="destination_path"): + await tool_fn(action="flash_backup", confirm=True, remote_name="r", source_path="/boot") + + async def test_flash_backup_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "initiateFlashBackup": {"status": "started", "jobId": "job-123"} + } + tool_fn = _make_tool() + result = await tool_fn( + action="flash_backup", + confirm=True, + remote_name="myremote", + source_path="/boot", + destination_path="/backups/flash", + ) + assert result["success"] is True + assert 
result["data"]["status"] == "started" + + async def test_flash_backup_passes_options(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"initiateFlashBackup": {"status": "started", "jobId": None}} + tool_fn = _make_tool() + await tool_fn( + action="flash_backup", + confirm=True, + remote_name="r", + source_path="/boot", + destination_path="/bak", + backup_options={"dry-run": True}, + ) + call_args = _mock_graphql.call_args + assert call_args[0][1]["input"]["options"] == {"dry-run": True} +``` + +- [ ] **Step 2: Run tests to confirm they fail** + +```bash +uv run pytest tests/test_storage.py::TestStorageFlashBackup -v 2>&1 | tail -15 +``` + +Expected: All FAIL with `ToolError: Invalid action` or collection errors. + +--- + +### Task 4: Implement flash_backup in storage.py (GREEN) + +**Files:** +- Modify: `unraid_mcp/tools/storage.py` + +- [ ] **Step 3: Add MUTATIONS dict and DESTRUCTIVE_ACTIONS** (insert after the QUERIES dict closing `}`) + +```python +MUTATIONS: dict[str, str] = { + "flash_backup": """ + mutation InitiateFlashBackup($input: InitiateFlashBackupInput!) 
{ + initiateFlashBackup(input: $input) { status jobId } + } + """, +} + +DESTRUCTIVE_ACTIONS = {"flash_backup"} +``` + +- [ ] **Step 4: Update ALL_ACTIONS, STORAGE_ACTIONS Literal, and sync guard** + +Replace: +```python +ALL_ACTIONS = set(QUERIES) +``` +With: +```python +ALL_ACTIONS = set(QUERIES) | set(MUTATIONS) +``` + +Replace the STORAGE_ACTIONS Literal: +```python +STORAGE_ACTIONS = Literal[ + "shares", + "disks", + "disk_details", + "unassigned", + "log_files", + "logs", + "flash_backup", +] +``` + +- [ ] **Step 5: Add params and `confirm` to the tool function signature** + +Add after `tail_lines`: +```python + confirm: bool = False, + remote_name: str | None = None, + source_path: str | None = None, + destination_path: str | None = None, + backup_options: dict[str, Any] | None = None, +``` + +- [ ] **Step 6: Add validation and handler branch** (insert before the final `raise ToolError(...)`) + +Add destructive guard after the action validation: +```python + if action in DESTRUCTIVE_ACTIONS and not confirm: + raise ToolError(f"Action '{action}' is destructive. 
Set confirm=True to proceed.") +``` + +Add handler: +```python + if action == "flash_backup": + if not remote_name: + raise ToolError("remote_name is required for 'flash_backup' action") + if not source_path: + raise ToolError("source_path is required for 'flash_backup' action") + if not destination_path: + raise ToolError("destination_path is required for 'flash_backup' action") + input_data: dict[str, Any] = { + "remoteName": remote_name, + "sourcePath": source_path, + "destinationPath": destination_path, + } + if backup_options is not None: + input_data["options"] = backup_options + data = await make_graphql_request(MUTATIONS["flash_backup"], {"input": input_data}) + return {"success": True, "action": "flash_backup", "data": data.get("initiateFlashBackup")} +``` + +Update the docstring to include: +``` + flash_backup - Initiate flash drive backup via rclone (requires remote_name, source_path, destination_path, confirm=True) +``` + +- [ ] **Step 7: Run all storage tests** + +```bash +uv run pytest tests/test_storage.py -v 2>&1 | tail -20 +``` + +Expected: All PASS. 
+ +- [ ] **Step 8: Lint** + +```bash +uv run ruff check unraid_mcp/tools/storage.py && uv run ruff format --check unraid_mcp/tools/storage.py +``` + +- [ ] **Step 9: Commit** + +```bash +git add unraid_mcp/tools/storage.py tests/test_storage.py +git commit -m "feat: add flash_backup mutation to storage tool" +``` + +--- + +### Task 5: Test info mutations — update_server, update_ssh (RED) + +**Files:** +- Modify: `tests/test_info.py` + +- [ ] **Step 10: Append new test class to `tests/test_info.py`** + +```python +class TestInfoMutations: + async def test_update_server_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "updateServerIdentity": {"id": "srv:1", "name": "tootie", "comment": "main server", "status": "ONLINE"} + } + tool_fn = _make_tool() + result = await tool_fn(action="update_server", server_name="tootie", server_comment="main server") + assert result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1]["name"] == "tootie" + assert call_args[0][1]["comment"] == "main server" + + async def test_update_server_requires_name(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="server_name"): + await tool_fn(action="update_server") + + async def test_update_server_with_sys_model(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"updateServerIdentity": {"id": "srv:1", "name": "tootie", "comment": None, "status": "ONLINE"}} + tool_fn = _make_tool() + await tool_fn(action="update_server", server_name="tootie", sys_model="Custom Server") + call_args = _mock_graphql.call_args + assert call_args[0][1]["sysModel"] == "Custom Server" + + async def test_update_ssh_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "updateSshSettings": {"id": "vars:1", "useSsh": True, "portssh": 22} + } + tool_fn = _make_tool() + result = await tool_fn(action="update_ssh", ssh_enabled=True, ssh_port=22) + assert 
result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1]["input"]["enabled"] is True + assert call_args[0][1]["input"]["port"] == 22 + + async def test_update_ssh_requires_enabled(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="ssh_enabled"): + await tool_fn(action="update_ssh", ssh_port=22) + + async def test_update_ssh_requires_port(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="ssh_port"): + await tool_fn(action="update_ssh", ssh_enabled=True) +``` + +- [ ] **Step 11: Run tests to confirm they fail** + +```bash +uv run pytest tests/test_info.py::TestInfoMutations -v 2>&1 | tail -15 +``` + +Expected: All FAIL. + +--- + +### Task 6: Implement update_server and update_ssh in info.py (GREEN) + +**Files:** +- Modify: `unraid_mcp/tools/info.py` + +- [ ] **Step 12: Add MUTATIONS dict** (insert after the QUERIES dict closing `}`) + +```python +MUTATIONS: dict[str, str] = { + "update_server": """ + mutation UpdateServerIdentity($name: String!, $comment: String, $sysModel: String) { + updateServerIdentity(name: $name, comment: $comment, sysModel: $sysModel) { + id name comment status + } + } + """, + "update_ssh": """ + mutation UpdateSshSettings($input: UpdateSshInput!) 
{ + updateSshSettings(input: $input) { id useSsh portssh } + } + """, +} +``` + +- [ ] **Step 13: Update ALL_ACTIONS, INFO_ACTIONS Literal, and sync guard** + +Replace: +```python +ALL_ACTIONS = set(QUERIES) +``` +With: +```python +ALL_ACTIONS = set(QUERIES) | set(MUTATIONS) +``` + +Add `"update_server"` and `"update_ssh"` to the `INFO_ACTIONS` Literal (append before the closing bracket): +```python + "update_server", + "update_ssh", +``` + +Update the sync guard error message: +```python +if set(get_args(INFO_ACTIONS)) != ALL_ACTIONS: + _missing = ALL_ACTIONS - set(get_args(INFO_ACTIONS)) + _extra = set(get_args(INFO_ACTIONS)) - ALL_ACTIONS + raise RuntimeError( + f"INFO_ACTIONS and ALL_ACTIONS are out of sync. " + f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}" + ) +``` + +- [ ] **Step 14: Add params to the tool function signature** + +Add after `device_id`: +```python + server_name: str | None = None, + server_comment: str | None = None, + sys_model: str | None = None, + ssh_enabled: bool | None = None, + ssh_port: int | None = None, +``` + +- [ ] **Step 15: Add handler branches** (insert before the final `raise ToolError(...)`) + +```python + if action == "update_server": + if server_name is None: + raise ToolError("server_name is required for 'update_server' action") + variables = {"name": server_name} + if server_comment is not None: + variables["comment"] = server_comment + if sys_model is not None: + variables["sysModel"] = sys_model + data = await make_graphql_request(MUTATIONS["update_server"], variables) + return {"success": True, "action": "update_server", "data": data.get("updateServerIdentity")} + + if action == "update_ssh": + if ssh_enabled is None: + raise ToolError("ssh_enabled is required for 'update_ssh' action") + if ssh_port is None: + raise ToolError("ssh_port is required for 'update_ssh' action") + data = await make_graphql_request( + MUTATIONS["update_ssh"], + {"input": {"enabled": ssh_enabled, "port": 
ssh_port}}, + ) + return {"success": True, "action": "update_ssh", "data": data.get("updateSshSettings")} +``` + +Update docstring to add: +``` + update_server - Update server name, comment, and model (requires server_name) + update_ssh - Enable/disable SSH and set port (requires ssh_enabled, ssh_port) +``` + +- [ ] **Step 16: Run all info tests** + +```bash +uv run pytest tests/test_info.py -v 2>&1 | tail -20 +``` + +Expected: All PASS. + +- [ ] **Step 17: Lint** + +```bash +uv run ruff check unraid_mcp/tools/info.py && uv run ruff format --check unraid_mcp/tools/info.py +``` + +- [ ] **Step 18: Commit** + +```bash +git add unraid_mcp/tools/info.py tests/test_info.py +git commit -m "feat: add update_server and update_ssh mutations to info tool" +``` + +--- + +## Chunk 3: Docker — 11 organizer mutations + +### Task 7: Test the 11 Docker organizer mutations (RED) + +**Files:** +- Modify: `tests/test_docker.py` + +- [ ] **Step 1: Append organizer test class to `tests/test_docker.py`** + +```python +# Helper fixture for organizer response +_ORGANIZER_RESPONSE = { + "version": 1.0, + "views": [{"id": "default", "name": "Default", "rootId": "root", "flatEntries": []}], +} + + +class TestDockerOrganizerMutations: + async def test_create_folder_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"createDockerFolder": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + result = await tool_fn(action="create_folder", folder_name="Media Apps") + assert result["success"] is True + + async def test_create_folder_requires_name(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="folder_name"): + await tool_fn(action="create_folder") + + async def test_create_folder_passes_children(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"createDockerFolder": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + await tool_fn(action="create_folder", folder_name="Media", children_ids=["c1", 
"c2"]) + call_args = _mock_graphql.call_args + assert call_args[0][1]["childrenIds"] == ["c1", "c2"] + + async def test_set_folder_children_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"setDockerFolderChildren": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + result = await tool_fn(action="set_folder_children", children_ids=["c1"]) + assert result["success"] is True + + async def test_set_folder_children_requires_children(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="children_ids"): + await tool_fn(action="set_folder_children") + + async def test_delete_entries_requires_confirm(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="destructive"): + await tool_fn(action="delete_entries", entry_ids=["e1"]) + + async def test_delete_entries_requires_ids(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="entry_ids"): + await tool_fn(action="delete_entries", confirm=True) + + async def test_delete_entries_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"deleteDockerEntries": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + result = await tool_fn(action="delete_entries", entry_ids=["e1", "e2"], confirm=True) + assert result["success"] is True + + async def test_move_to_folder_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"moveDockerEntriesToFolder": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + result = await tool_fn( + action="move_to_folder", + source_entry_ids=["e1"], + destination_folder_id="folder-1", + ) + assert result["success"] is True + + async def test_move_to_folder_requires_source_ids(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="source_entry_ids"): + await tool_fn(action="move_to_folder", destination_folder_id="f1") + + async def 
test_move_to_folder_requires_destination(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="destination_folder_id"): + await tool_fn(action="move_to_folder", source_entry_ids=["e1"]) + + async def test_move_to_position_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"moveDockerItemsToPosition": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + result = await tool_fn( + action="move_to_position", + source_entry_ids=["e1"], + destination_folder_id="folder-1", + position=2.0, + ) + assert result["success"] is True + + async def test_move_to_position_requires_position(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="position"): + await tool_fn( + action="move_to_position", + source_entry_ids=["e1"], + destination_folder_id="f1", + ) + + async def test_rename_folder_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"renameDockerFolder": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + result = await tool_fn(action="rename_folder", folder_id="f1", new_folder_name="New Name") + assert result["success"] is True + + async def test_rename_folder_requires_folder_id(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="folder_id"): + await tool_fn(action="rename_folder", new_folder_name="New") + + async def test_rename_folder_requires_new_name(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="new_folder_name"): + await tool_fn(action="rename_folder", folder_id="f1") + + async def test_create_folder_with_items_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"createDockerFolderWithItems": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + result = await tool_fn(action="create_folder_with_items", folder_name="New Folder") + assert result["success"] is True + + async def 
test_create_folder_with_items_requires_name(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="folder_name"): + await tool_fn(action="create_folder_with_items") + + async def test_update_view_prefs_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"updateDockerViewPreferences": _ORGANIZER_RESPONSE} + tool_fn = _make_tool() + result = await tool_fn(action="update_view_prefs", view_prefs={"sort": "name"}) + assert result["success"] is True + + async def test_update_view_prefs_requires_prefs(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="view_prefs"): + await tool_fn(action="update_view_prefs") + + async def test_sync_templates_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "syncDockerTemplatePaths": {"scanned": 10, "matched": 8, "skipped": 2, "errors": []} + } + tool_fn = _make_tool() + result = await tool_fn(action="sync_templates") + assert result["success"] is True + + async def test_reset_template_mappings_requires_confirm(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="destructive"): + await tool_fn(action="reset_template_mappings") + + async def test_reset_template_mappings_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"resetDockerTemplateMappings": True} + tool_fn = _make_tool() + result = await tool_fn(action="reset_template_mappings", confirm=True) + assert result["success"] is True + + async def test_refresh_digests_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"refreshDockerDigests": True} + tool_fn = _make_tool() + result = await tool_fn(action="refresh_digests") + assert result["success"] is True +``` + +- [ ] **Step 2: Run tests to confirm they fail** + +```bash +uv run pytest tests/test_docker.py::TestDockerOrganizerMutations -v 2>&1 | tail -20 +``` + 
+Expected: All FAIL. + +--- + +### Task 8: Implement 11 organizer mutations in docker.py (GREEN) + +**Files:** +- Modify: `unraid_mcp/tools/docker.py` + +- [ ] **Step 3: Add 11 entries to the MUTATIONS dict** (append inside the existing MUTATIONS dict before its closing `}`) + +```python + "create_folder": """ + mutation CreateDockerFolder($name: String!, $parentId: String, $childrenIds: [String!]) { + createDockerFolder(name: $name, parentId: $parentId, childrenIds: $childrenIds) { + version views { id name rootId flatEntries { id type name parentId depth position path hasChildren childrenIds } } + } + } + """, + "set_folder_children": """ + mutation SetDockerFolderChildren($folderId: String, $childrenIds: [String!]!) { + setDockerFolderChildren(folderId: $folderId, childrenIds: $childrenIds) { + version views { id name rootId flatEntries { id type name parentId depth position path hasChildren childrenIds } } + } + } + """, + "delete_entries": """ + mutation DeleteDockerEntries($entryIds: [String!]!) { + deleteDockerEntries(entryIds: $entryIds) { + version views { id name rootId flatEntries { id type name parentId depth position path hasChildren childrenIds } } + } + } + """, + "move_to_folder": """ + mutation MoveDockerEntriesToFolder($sourceEntryIds: [String!]!, $destinationFolderId: String!) { + moveDockerEntriesToFolder(sourceEntryIds: $sourceEntryIds, destinationFolderId: $destinationFolderId) { + version views { id name rootId flatEntries { id type name parentId depth position path hasChildren childrenIds } } + } + } + """, + "move_to_position": """ + mutation MoveDockerItemsToPosition($sourceEntryIds: [String!]!, $destinationFolderId: String!, $position: Float!) 
{ + moveDockerItemsToPosition(sourceEntryIds: $sourceEntryIds, destinationFolderId: $destinationFolderId, position: $position) { + version views { id name rootId flatEntries { id type name parentId depth position path hasChildren childrenIds } } + } + } + """, + "rename_folder": """ + mutation RenameDockerFolder($folderId: String!, $newName: String!) { + renameDockerFolder(folderId: $folderId, newName: $newName) { + version views { id name rootId flatEntries { id type name parentId depth position path hasChildren childrenIds } } + } + } + """, + "create_folder_with_items": """ + mutation CreateDockerFolderWithItems($name: String!, $parentId: String, $sourceEntryIds: [String!], $position: Float) { + createDockerFolderWithItems(name: $name, parentId: $parentId, sourceEntryIds: $sourceEntryIds, position: $position) { + version views { id name rootId flatEntries { id type name parentId depth position path hasChildren childrenIds } } + } + } + """, + "update_view_prefs": """ + mutation UpdateDockerViewPreferences($viewId: String, $prefs: JSON!) 
{ + updateDockerViewPreferences(viewId: $viewId, prefs: $prefs) { + version views { id name rootId } + } + } + """, + "sync_templates": """ + mutation SyncDockerTemplatePaths { + syncDockerTemplatePaths { scanned matched skipped errors } + } + """, + "reset_template_mappings": """ + mutation ResetDockerTemplateMappings { + resetDockerTemplateMappings + } + """, + "refresh_digests": """ + mutation RefreshDockerDigests { + refreshDockerDigests + } + """, +``` + +- [ ] **Step 4: Update DESTRUCTIVE_ACTIONS** + +Replace: +```python +DESTRUCTIVE_ACTIONS = {"remove", "update_all"} +``` +With: +```python +DESTRUCTIVE_ACTIONS = {"remove", "update_all", "delete_entries", "reset_template_mappings"} +``` + +- [ ] **Step 5: Update DOCKER_ACTIONS Literal** + +Append 11 new strings to the Literal (before the closing `]`): +```python + "create_folder", + "set_folder_children", + "delete_entries", + "move_to_folder", + "move_to_position", + "rename_folder", + "create_folder_with_items", + "update_view_prefs", + "sync_templates", + "reset_template_mappings", + "refresh_digests", +``` + +- [ ] **Step 6: Add new params to the tool function signature** + +Add after `tail_lines`: +```python + folder_name: str | None = None, + folder_id: str | None = None, + parent_id: str | None = None, + children_ids: list[str] | None = None, + entry_ids: list[str] | None = None, + source_entry_ids: list[str] | None = None, + destination_folder_id: str | None = None, + position: float | None = None, + new_folder_name: str | None = None, + view_id: str = "default", + view_prefs: dict[str, Any] | None = None, +``` + +- [ ] **Step 7: Add handler branches** (insert before the final `raise ToolError(...)`) + +```python + # --- Docker organizer mutations --- + if action == "create_folder": + if not folder_name: + raise ToolError("folder_name is required for 'create_folder' action") + vars_: dict[str, Any] = {"name": folder_name} + if parent_id is not None: + vars_["parentId"] = parent_id + if children_ids is 
not None: + vars_["childrenIds"] = children_ids + data = await make_graphql_request(MUTATIONS["create_folder"], vars_) + return {"success": True, "action": "create_folder", "organizer": data.get("createDockerFolder")} + + if action == "set_folder_children": + if not children_ids: + raise ToolError("children_ids is required for 'set_folder_children' action") + vars_ = {"childrenIds": children_ids} + if folder_id is not None: + vars_["folderId"] = folder_id + data = await make_graphql_request(MUTATIONS["set_folder_children"], vars_) + return {"success": True, "action": "set_folder_children", "organizer": data.get("setDockerFolderChildren")} + + if action == "delete_entries": + if not entry_ids: + raise ToolError("entry_ids is required for 'delete_entries' action") + data = await make_graphql_request(MUTATIONS["delete_entries"], {"entryIds": entry_ids}) + return {"success": True, "action": "delete_entries", "organizer": data.get("deleteDockerEntries")} + + if action == "move_to_folder": + if not source_entry_ids: + raise ToolError("source_entry_ids is required for 'move_to_folder' action") + if not destination_folder_id: + raise ToolError("destination_folder_id is required for 'move_to_folder' action") + data = await make_graphql_request( + MUTATIONS["move_to_folder"], + {"sourceEntryIds": source_entry_ids, "destinationFolderId": destination_folder_id}, + ) + return {"success": True, "action": "move_to_folder", "organizer": data.get("moveDockerEntriesToFolder")} + + if action == "move_to_position": + if not source_entry_ids: + raise ToolError("source_entry_ids is required for 'move_to_position' action") + if not destination_folder_id: + raise ToolError("destination_folder_id is required for 'move_to_position' action") + if position is None: + raise ToolError("position is required for 'move_to_position' action") + data = await make_graphql_request( + MUTATIONS["move_to_position"], + { + "sourceEntryIds": source_entry_ids, + "destinationFolderId": destination_folder_id, 
+ "position": position, + }, + ) + return {"success": True, "action": "move_to_position", "organizer": data.get("moveDockerItemsToPosition")} + + if action == "rename_folder": + if not folder_id: + raise ToolError("folder_id is required for 'rename_folder' action") + if not new_folder_name: + raise ToolError("new_folder_name is required for 'rename_folder' action") + data = await make_graphql_request( + MUTATIONS["rename_folder"], {"folderId": folder_id, "newName": new_folder_name} + ) + return {"success": True, "action": "rename_folder", "organizer": data.get("renameDockerFolder")} + + if action == "create_folder_with_items": + if not folder_name: + raise ToolError("folder_name is required for 'create_folder_with_items' action") + vars_ = {"name": folder_name} + if parent_id is not None: + vars_["parentId"] = parent_id + if entry_ids is not None: + vars_["sourceEntryIds"] = entry_ids + if position is not None: + vars_["position"] = position + data = await make_graphql_request(MUTATIONS["create_folder_with_items"], vars_) + return {"success": True, "action": "create_folder_with_items", "organizer": data.get("createDockerFolderWithItems")} + + if action == "update_view_prefs": + if view_prefs is None: + raise ToolError("view_prefs is required for 'update_view_prefs' action") + data = await make_graphql_request( + MUTATIONS["update_view_prefs"], {"viewId": view_id, "prefs": view_prefs} + ) + return {"success": True, "action": "update_view_prefs", "organizer": data.get("updateDockerViewPreferences")} + + if action == "sync_templates": + data = await make_graphql_request(MUTATIONS["sync_templates"]) + return {"success": True, "action": "sync_templates", "result": data.get("syncDockerTemplatePaths")} + + if action == "reset_template_mappings": + data = await make_graphql_request(MUTATIONS["reset_template_mappings"]) + return {"success": True, "action": "reset_template_mappings", "result": data.get("resetDockerTemplateMappings")} + + if action == "refresh_digests": + 
data = await make_graphql_request(MUTATIONS["refresh_digests"]) + return {"success": True, "action": "refresh_digests", "result": data.get("refreshDockerDigests")} +``` + +Update docstring to add the 11 new actions. + +- [ ] **Step 8: Run all docker tests** + +```bash +uv run pytest tests/test_docker.py -v 2>&1 | tail -30 +``` + +Expected: All PASS. + +- [ ] **Step 9: Lint** + +```bash +uv run ruff check unraid_mcp/tools/docker.py && uv run ruff format --check unraid_mcp/tools/docker.py +``` + +- [ ] **Step 10: Commit** + +```bash +git add unraid_mcp/tools/docker.py tests/test_docker.py +git commit -m "feat: add 11 Docker organizer mutations (folders, positions, templates, digests)" +``` + +--- + +## Chunk 4: New settings tool — 9 mutations + +### Task 9: Create test_settings.py (RED) + +**Files:** +- Create: `tests/test_settings.py` + +- [ ] **Step 1: Create `tests/test_settings.py`** + +```python +"""Tests for unraid_settings tool.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest +from conftest import make_tool_fn + +from unraid_mcp.core.exceptions import ToolError + + +@pytest.fixture +def _mock_graphql() -> Generator[AsyncMock, None, None]: + with patch( + "unraid_mcp.tools.settings.make_graphql_request", new_callable=AsyncMock + ) as mock: + yield mock + + +def _make_tool(): + return make_tool_fn( + "unraid_mcp.tools.settings", "register_settings_tool", "unraid_settings" + ) + + +class TestSettingsValidation: + async def test_invalid_action_rejected(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="Invalid action"): + await tool_fn(action="nonexistent") # type: ignore[arg-type] + + async def test_configure_ups_requires_confirm(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="destructive"): + await tool_fn(action="configure_ups", ups_config={"service": "ENABLE"}) + + async def 
test_setup_remote_access_requires_confirm(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="destructive"): + await tool_fn(action="setup_remote_access", access_type="ALWAYS") + + async def test_enable_dynamic_remote_access_requires_confirm( + self, _mock_graphql: AsyncMock + ) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="destructive"): + await tool_fn( + action="enable_dynamic_remote_access", + access_url_type="LAN", + dynamic_enabled=True, + ) + + +class TestSettingsUpdate: + async def test_update_requires_settings_input(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="settings_input"): + await tool_fn(action="update") + + async def test_update_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "updateSettings": {"restartRequired": False, "values": {"key": "val"}, "warnings": []} + } + tool_fn = _make_tool() + result = await tool_fn(action="update", settings_input={"shareSmbEnabled": True}) + assert result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1]["input"] == {"shareSmbEnabled": True} + + +class TestTemperatureConfig: + async def test_update_temperature_requires_config(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="temperature_config"): + await tool_fn(action="update_temperature") + + async def test_update_temperature_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"updateTemperatureConfig": True} + tool_fn = _make_tool() + result = await tool_fn( + action="update_temperature", + temperature_config={"enabled": True, "polling_interval": 30}, + ) + assert result["success"] is True + + +class TestSystemTime: + async def test_update_time_requires_at_least_one_field( + self, _mock_graphql: AsyncMock + ) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, 
match="at least one"): + await tool_fn(action="update_time") + + async def test_update_time_timezone_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "updateSystemTime": { + "currentTime": "2026-03-13T12:00:00Z", + "timeZone": "America/New_York", + "useNtp": True, + "ntpServers": ["pool.ntp.org"], + } + } + tool_fn = _make_tool() + result = await tool_fn(action="update_time", time_zone="America/New_York") + assert result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1]["input"]["timeZone"] == "America/New_York" + + async def test_update_time_ntp_servers(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"updateSystemTime": {"currentTime": "2026-03-13T12:00:00Z", "timeZone": "UTC", "useNtp": True, "ntpServers": ["0.pool.ntp.org"]}} + tool_fn = _make_tool() + await tool_fn(action="update_time", use_ntp=True, ntp_servers=["0.pool.ntp.org"]) + call_args = _mock_graphql.call_args + assert call_args[0][1]["input"]["useNtp"] is True + assert call_args[0][1]["input"]["ntpServers"] == ["0.pool.ntp.org"] + + +class TestUpsConfig: + async def test_configure_ups_requires_config(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="ups_config"): + await tool_fn(action="configure_ups", confirm=True) + + async def test_configure_ups_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"configureUps": True} + tool_fn = _make_tool() + result = await tool_fn( + action="configure_ups", + confirm=True, + ups_config={"service": "ENABLE", "upsCable": "USB"}, + ) + assert result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1]["config"]["service"] == "ENABLE" + + +class TestApiSettings: + async def test_update_api_requires_at_least_one_field( + self, _mock_graphql: AsyncMock + ) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="at least one"): + await 
tool_fn(action="update_api") + + async def test_update_api_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "updateApiSettings": {"accessType": "ALWAYS", "forwardType": None, "port": None} + } + tool_fn = _make_tool() + result = await tool_fn(action="update_api", access_type="ALWAYS") + assert result["success"] is True + + +class TestConnectActions: + async def test_connect_sign_in_requires_api_key(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="api_key"): + await tool_fn(action="connect_sign_in") + + async def test_connect_sign_in_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"connectSignIn": True} + tool_fn = _make_tool() + result = await tool_fn( + action="connect_sign_in", + api_key="secret-key", + username="jmagar", + email="jmagar@gmail.com", + ) + assert result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1]["input"]["apiKey"] == "secret-key" + assert call_args[0][1]["input"]["userInfo"]["preferred_username"] == "jmagar" + + async def test_connect_sign_out_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"connectSignOut": True} + tool_fn = _make_tool() + result = await tool_fn(action="connect_sign_out") + assert result["success"] is True + + +class TestRemoteAccess: + async def test_setup_remote_access_requires_access_type( + self, _mock_graphql: AsyncMock + ) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="access_type"): + await tool_fn(action="setup_remote_access", confirm=True) + + async def test_setup_remote_access_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = {"setupRemoteAccess": True} + tool_fn = _make_tool() + result = await tool_fn( + action="setup_remote_access", confirm=True, access_type="ALWAYS" + ) + assert result["success"] is True + + async def 
test_enable_dynamic_remote_access_requires_type_and_enabled( + self, _mock_graphql: AsyncMock + ) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="access_url_type"): + await tool_fn(action="enable_dynamic_remote_access", confirm=True) + + async def test_enable_dynamic_remote_access_requires_enabled_field( + self, _mock_graphql: AsyncMock + ) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="dynamic_enabled"): + await tool_fn( + action="enable_dynamic_remote_access", confirm=True, access_url_type="LAN" + ) + + async def test_enable_dynamic_remote_access_success( + self, _mock_graphql: AsyncMock + ) -> None: + _mock_graphql.return_value = {"enableDynamicRemoteAccess": True} + tool_fn = _make_tool() + result = await tool_fn( + action="enable_dynamic_remote_access", + confirm=True, + access_url_type="LAN", + access_url_ipv4="10.1.0.2", + dynamic_enabled=True, + ) + assert result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1]["input"]["enabled"] is True + assert call_args[0][1]["input"]["url"]["type"] == "LAN" +``` + +- [ ] **Step 2: Run tests to confirm they fail (import error expected)** + +```bash +uv run pytest tests/test_settings.py -v 2>&1 | tail -10 +``` + +Expected: `ModuleNotFoundError` or `ImportError` — the file doesn't exist yet. + +--- + +### Task 10: Create tools/settings.py (GREEN) + +**Files:** +- Create: `unraid_mcp/tools/settings.py` + +- [ ] **Step 3: Create `unraid_mcp/tools/settings.py`** + +```python +"""System settings, time, UPS, and remote access mutations. + +Provides the `unraid_settings` tool with 9 actions for updating system +configuration, time settings, UPS, API settings, and Unraid Connect. 
+""" + +from typing import Any, Literal, get_args + +from fastmcp import FastMCP + +from ..config.logging import logger +from ..core.client import make_graphql_request +from ..core.exceptions import ToolError, tool_error_handler + + +MUTATIONS: dict[str, str] = { + "update": """ + mutation UpdateSettings($input: JSON!) { + updateSettings(input: $input) { restartRequired values warnings } + } + """, + "update_temperature": """ + mutation UpdateTemperatureConfig($input: TemperatureConfigInput!) { + updateTemperatureConfig(input: $input) + } + """, + "update_time": """ + mutation UpdateSystemTime($input: UpdateSystemTimeInput!) { + updateSystemTime(input: $input) { currentTime timeZone useNtp ntpServers } + } + """, + "configure_ups": """ + mutation ConfigureUps($config: UPSConfigInput!) { + configureUps(config: $config) + } + """, + "update_api": """ + mutation UpdateApiSettings($input: ConnectSettingsInput!) { + updateApiSettings(input: $input) { accessType forwardType port } + } + """, + "connect_sign_in": """ + mutation ConnectSignIn($input: ConnectSignInInput!) { + connectSignIn(input: $input) + } + """, + "connect_sign_out": """ + mutation ConnectSignOut { + connectSignOut + } + """, + "setup_remote_access": """ + mutation SetupRemoteAccess($input: SetupRemoteAccessInput!) { + setupRemoteAccess(input: $input) + } + """, + "enable_dynamic_remote_access": """ + mutation EnableDynamicRemoteAccess($input: EnableDynamicRemoteAccessInput!) 
{ + enableDynamicRemoteAccess(input: $input) + } + """, +} + +DESTRUCTIVE_ACTIONS = {"configure_ups", "setup_remote_access", "enable_dynamic_remote_access"} +ALL_ACTIONS = set(MUTATIONS) + +SETTINGS_ACTIONS = Literal[ + "update", + "update_temperature", + "update_time", + "configure_ups", + "update_api", + "connect_sign_in", + "connect_sign_out", + "setup_remote_access", + "enable_dynamic_remote_access", +] + +if set(get_args(SETTINGS_ACTIONS)) != ALL_ACTIONS: + _missing = ALL_ACTIONS - set(get_args(SETTINGS_ACTIONS)) + _extra = set(get_args(SETTINGS_ACTIONS)) - ALL_ACTIONS + raise RuntimeError( + f"SETTINGS_ACTIONS and ALL_ACTIONS are out of sync. " + f"Missing from Literal: {_missing or 'none'}. Extra in Literal: {_extra or 'none'}" + ) + + +def register_settings_tool(mcp: FastMCP) -> None: + """Register the unraid_settings tool with the FastMCP instance.""" + + @mcp.tool() + async def unraid_settings( + action: SETTINGS_ACTIONS, + confirm: bool = False, + # updateSettings + settings_input: dict[str, Any] | None = None, + # updateTemperatureConfig + temperature_config: dict[str, Any] | None = None, + # updateSystemTime + time_zone: str | None = None, + use_ntp: bool | None = None, + ntp_servers: list[str] | None = None, + manual_datetime: str | None = None, + # configureUps + ups_config: dict[str, Any] | None = None, + # updateApiSettings / setupRemoteAccess + access_type: str | None = None, + forward_type: str | None = None, + port: int | None = None, + # connectSignIn + api_key: str | None = None, + username: str | None = None, + email: str | None = None, + avatar: str | None = None, + # enableDynamicRemoteAccess + access_url_type: str | None = None, + access_url_name: str | None = None, + access_url_ipv4: str | None = None, + access_url_ipv6: str | None = None, + dynamic_enabled: bool | None = None, + ) -> dict[str, Any]: + """Update Unraid system settings, time, UPS, and remote access configuration. 
+ + Actions: + update - Update system settings (requires settings_input dict) + update_temperature - Update temperature sensor config (requires temperature_config dict) + update_time - Update system time/timezone/NTP (requires at least one of: time_zone, use_ntp, ntp_servers, manual_datetime) + configure_ups - Configure UPS monitoring (requires ups_config dict, confirm=True) + update_api - Update API/Connect settings (requires at least one of: access_type, forward_type, port) + connect_sign_in - Sign in to Unraid Connect (requires api_key) + connect_sign_out - Sign out from Unraid Connect + setup_remote_access - Configure remote access (requires access_type, confirm=True) + enable_dynamic_remote_access - Enable/disable dynamic remote access (requires access_url_type, dynamic_enabled, confirm=True) + """ + if action not in ALL_ACTIONS: + raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}") + + if action in DESTRUCTIVE_ACTIONS and not confirm: + raise ToolError(f"Action '{action}' is destructive. 
Set confirm=True to proceed.") + + with tool_error_handler("settings", action, logger): + logger.info(f"Executing unraid_settings action={action}") + + if action == "update": + if settings_input is None: + raise ToolError("settings_input is required for 'update' action") + data = await make_graphql_request(MUTATIONS["update"], {"input": settings_input}) + return {"success": True, "action": "update", "data": data.get("updateSettings")} + + if action == "update_temperature": + if temperature_config is None: + raise ToolError("temperature_config is required for 'update_temperature' action") + data = await make_graphql_request( + MUTATIONS["update_temperature"], {"input": temperature_config} + ) + return {"success": True, "action": "update_temperature", "result": data.get("updateTemperatureConfig")} + + if action == "update_time": + time_input: dict[str, Any] = {} + if time_zone is not None: + time_input["timeZone"] = time_zone + if use_ntp is not None: + time_input["useNtp"] = use_ntp + if ntp_servers is not None: + time_input["ntpServers"] = ntp_servers + if manual_datetime is not None: + time_input["manualDateTime"] = manual_datetime + if not time_input: + raise ToolError( + "update_time requires at least one of: time_zone, use_ntp, ntp_servers, manual_datetime" + ) + data = await make_graphql_request(MUTATIONS["update_time"], {"input": time_input}) + return {"success": True, "action": "update_time", "data": data.get("updateSystemTime")} + + if action == "configure_ups": + if ups_config is None: + raise ToolError("ups_config is required for 'configure_ups' action") + data = await make_graphql_request(MUTATIONS["configure_ups"], {"config": ups_config}) + return {"success": True, "action": "configure_ups", "result": data.get("configureUps")} + + if action == "update_api": + api_input: dict[str, Any] = {} + if access_type is not None: + api_input["accessType"] = access_type + if forward_type is not None: + api_input["forwardType"] = forward_type + if port is not None: 
+ api_input["port"] = port + if not api_input: + raise ToolError( + "update_api requires at least one of: access_type, forward_type, port" + ) + data = await make_graphql_request(MUTATIONS["update_api"], {"input": api_input}) + return {"success": True, "action": "update_api", "data": data.get("updateApiSettings")} + + if action == "connect_sign_in": + if not api_key: + raise ToolError("api_key is required for 'connect_sign_in' action") + sign_in_input: dict[str, Any] = {"apiKey": api_key} + if username or email: + user_info: dict[str, Any] = {} + if username: + user_info["preferred_username"] = username + if email: + user_info["email"] = email + if avatar: + user_info["avatar"] = avatar + sign_in_input["userInfo"] = user_info + data = await make_graphql_request( + MUTATIONS["connect_sign_in"], {"input": sign_in_input} + ) + return {"success": True, "action": "connect_sign_in", "result": data.get("connectSignIn")} + + if action == "connect_sign_out": + data = await make_graphql_request(MUTATIONS["connect_sign_out"]) + return {"success": True, "action": "connect_sign_out", "result": data.get("connectSignOut")} + + if action == "setup_remote_access": + if not access_type: + raise ToolError("access_type is required for 'setup_remote_access' action") + remote_input: dict[str, Any] = {"accessType": access_type} + if forward_type is not None: + remote_input["forwardType"] = forward_type + if port is not None: + remote_input["port"] = port + data = await make_graphql_request( + MUTATIONS["setup_remote_access"], {"input": remote_input} + ) + return {"success": True, "action": "setup_remote_access", "result": data.get("setupRemoteAccess")} + + if action == "enable_dynamic_remote_access": + if not access_url_type: + raise ToolError("access_url_type is required for 'enable_dynamic_remote_access' action") + if dynamic_enabled is None: + raise ToolError("dynamic_enabled is required for 'enable_dynamic_remote_access' action") + url_input: dict[str, Any] = {"type": 
access_url_type} + if access_url_name is not None: + url_input["name"] = access_url_name + if access_url_ipv4 is not None: + url_input["ipv4"] = access_url_ipv4 + if access_url_ipv6 is not None: + url_input["ipv6"] = access_url_ipv6 + data = await make_graphql_request( + MUTATIONS["enable_dynamic_remote_access"], + {"input": {"url": url_input, "enabled": dynamic_enabled}}, + ) + return {"success": True, "action": "enable_dynamic_remote_access", "result": data.get("enableDynamicRemoteAccess")} + + raise ToolError(f"Unhandled action '{action}' — this is a bug") + + logger.info("Settings tool registered successfully") +``` + +- [ ] **Step 4: Run settings tests** + +```bash +uv run pytest tests/test_settings.py -v 2>&1 | tail -30 +``` + +Expected: All PASS. + +- [ ] **Step 5: Lint** + +```bash +uv run ruff check unraid_mcp/tools/settings.py && uv run ruff format --check unraid_mcp/tools/settings.py +``` + +- [ ] **Step 6: Commit** + +```bash +git add unraid_mcp/tools/settings.py tests/test_settings.py +git commit -m "feat: add new unraid_settings tool with 9 mutations (settings, time, UPS, connect, remote access)" +``` + +--- + +### Task 11: Register settings tool in server.py + +**Files:** +- Modify: `unraid_mcp/server.py` + +- [ ] **Step 7: Add import** (after the existing `register_users_tool` import line) + +```python +from .tools.settings import register_settings_tool +``` + +- [ ] **Step 8: Add to registrars list** (append inside the `registrars` list) + +```python + register_settings_tool, +``` + +- [ ] **Step 9: Verify server still starts cleanly** + +```bash +uv run python -c "from unraid_mcp.server import register_all_modules, mcp; register_all_modules(); print('OK')" 2>&1 +``` + +Expected: `OK` with no errors (note: this will fail on missing env vars — that's fine, we're only checking imports and registration). 
+ +Actually use: +```bash +uv run python -c " +import os; os.environ.setdefault('UNRAID_API_URL','http://fake'); os.environ.setdefault('UNRAID_API_KEY','fake') +from unraid_mcp.server import register_all_modules, mcp +register_all_modules() +tools = list(mcp._tool_manager._tools.keys()) +print(f'Registered {len(tools)} tools: {tools}') +" +``` + +Expected: Output includes `unraid_settings` in the tools list. + +- [ ] **Step 10: Run full test suite to confirm nothing broken** + +```bash +uv run pytest --tb=short -q 2>&1 | tail -15 +``` + +Expected: All tests PASS (598 original + ~63 new = ~661 total). + +- [ ] **Step 11: Lint all modified files** + +```bash +uv run ruff check unraid_mcp/ && uv run ruff format --check unraid_mcp/ +``` + +- [ ] **Step 12: Commit** + +```bash +git add unraid_mcp/server.py +git commit -m "feat: register unraid_settings tool in server — 11 tools, 104 actions total" +``` + +--- + +## Final Verification + +- [ ] **Count total actions** + +```bash +uv run python -c " +import os; os.environ.setdefault('UNRAID_API_URL','http://fake'); os.environ.setdefault('UNRAID_API_KEY','fake') +from unraid_mcp.tools.notifications import ALL_ACTIONS as n +from unraid_mcp.tools.storage import ALL_ACTIONS as s +from unraid_mcp.tools.info import ALL_ACTIONS as i +from unraid_mcp.tools.docker import ALL_ACTIONS as d +from unraid_mcp.tools.virtualization import ALL_ACTIONS as v +from unraid_mcp.tools.array import ALL_ACTIONS as a +from unraid_mcp.tools.rclone import ALL_ACTIONS as r +from unraid_mcp.tools.users import ALL_ACTIONS as u +from unraid_mcp.tools.keys import ALL_ACTIONS as k +from unraid_mcp.tools.health import HEALTH_ACTIONS +from unraid_mcp.tools.settings import ALL_ACTIONS as st +from typing import get_args +total = len(n)+len(s)+len(i)+len(d)+len(v)+len(a)+len(r)+len(u)+len(k)+len(get_args(HEALTH_ACTIONS))+len(st) +print(f'Total actions: {total}') +" +``` + +Expected output: `Total actions: 104` + +- [ ] **Run full test suite one final time** + 
+```bash +uv run pytest -v --tb=short 2>&1 | tail -20 +``` + +Expected: All tests PASS. + +- [ ] **Update MEMORY.md counts** + +Update `/home/jmagar/.claude/projects/-home-jmagar-workspace-unraid-mcp/memory/MEMORY.md`: +- Change `10 tools, 76 actions` → `11 tools, 104 actions` +- Update test count to reflect new total +- Add `unraid_settings` row to the Tool Reference table diff --git a/pyproject.toml b/pyproject.toml index ad129aa..27835b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ build-backend = "hatchling.build" # ============================================================================ [project] name = "unraid-mcp" -version = "0.2.1" +version = "0.3.0" description = "MCP Server for Unraid API - provides tools to interact with an Unraid server's GraphQL API" readme = "README.md" license = {file = "LICENSE"} diff --git a/tests/http_layer/test_request_construction.py b/tests/http_layer/test_request_construction.py index c183788..5e3efa8 100644 --- a/tests/http_layer/test_request_construction.py +++ b/tests/http_layer/test_request_construction.py @@ -228,9 +228,7 @@ class TestGraphQLErrorHandling: @respx.mock async def test_idempotent_start_error_returns_success(self) -> None: respx.post(API_URL).mock( - return_value=_graphql_response( - errors=[{"message": "Container already running"}] - ) + return_value=_graphql_response(errors=[{"message": "Container already running"}]) ) result = await make_graphql_request( 'mutation { docker { start(id: "x") } }', @@ -242,9 +240,7 @@ class TestGraphQLErrorHandling: @respx.mock async def test_idempotent_stop_error_returns_success(self) -> None: respx.post(API_URL).mock( - return_value=_graphql_response( - errors=[{"message": "Container not running"}] - ) + return_value=_graphql_response(errors=[{"message": "Container not running"}]) ) result = await make_graphql_request( 'mutation { docker { stop(id: "x") } }', @@ -275,7 +271,13 @@ class TestInfoToolRequests: async def 
test_overview_sends_correct_query(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"info": {"os": {"platform": "linux", "hostname": "tower"}, "cpu": {}, "memory": {}}} + { + "info": { + "os": {"platform": "linux", "hostname": "tower"}, + "cpu": {}, + "memory": {}, + } + } ) ) tool = self._get_tool() @@ -329,9 +331,7 @@ class TestInfoToolRequests: @respx.mock async def test_online_sends_correct_query(self) -> None: - route = respx.post(API_URL).mock( - return_value=_graphql_response({"online": True}) - ) + route = respx.post(API_URL).mock(return_value=_graphql_response({"online": True})) tool = self._get_tool() await tool(action="online") body = _extract_request_body(route.calls.last.request) @@ -374,9 +374,7 @@ class TestDockerToolRequests: async def test_list_sends_correct_query(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"docker": {"containers": [ - {"id": "c1", "names": ["plex"], "state": "running"} - ]}} + {"docker": {"containers": [{"id": "c1", "names": ["plex"], "state": "running"}]}} ) ) tool = self._get_tool() @@ -389,10 +387,16 @@ class TestDockerToolRequests: container_id = "a" * 64 route = respx.post(API_URL).mock( return_value=_graphql_response( - {"docker": {"start": { - "id": container_id, "names": ["plex"], - "state": "running", "status": "Up", - }}} + { + "docker": { + "start": { + "id": container_id, + "names": ["plex"], + "state": "running", + "status": "Up", + } + } + } ) ) tool = self._get_tool() @@ -406,10 +410,16 @@ class TestDockerToolRequests: container_id = "b" * 64 route = respx.post(API_URL).mock( return_value=_graphql_response( - {"docker": {"stop": { - "id": container_id, "names": ["sonarr"], - "state": "exited", "status": "Exited", - }}} + { + "docker": { + "stop": { + "id": container_id, + "names": ["sonarr"], + "state": "exited", + "status": "Exited", + } + } + } ) ) tool = self._get_tool() @@ -451,9 +461,11 @@ class TestDockerToolRequests: async def 
test_networks_sends_correct_query(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"dockerNetworks": [ - {"id": "n1", "name": "bridge", "driver": "bridge", "scope": "local"} - ]} + { + "dockerNetworks": [ + {"id": "n1", "name": "bridge", "driver": "bridge", "scope": "local"} + ] + } ) ) tool = self._get_tool() @@ -464,9 +476,7 @@ class TestDockerToolRequests: @respx.mock async def test_check_updates_sends_correct_query(self) -> None: route = respx.post(API_URL).mock( - return_value=_graphql_response( - {"docker": {"containerUpdateStatuses": []}} - ) + return_value=_graphql_response({"docker": {"containerUpdateStatuses": []}}) ) tool = self._get_tool() await tool(action="check_updates") @@ -485,17 +495,29 @@ class TestDockerToolRequests: call_count += 1 if "StopContainer" in body["query"]: return _graphql_response( - {"docker": {"stop": { - "id": container_id, "names": ["app"], - "state": "exited", "status": "Exited", - }}} + { + "docker": { + "stop": { + "id": container_id, + "names": ["app"], + "state": "exited", + "status": "Exited", + } + } + } ) if "StartContainer" in body["query"]: return _graphql_response( - {"docker": {"start": { - "id": container_id, "names": ["app"], - "state": "running", "status": "Up", - }}} + { + "docker": { + "start": { + "id": container_id, + "names": ["app"], + "state": "running", + "status": "Up", + } + } + } ) return _graphql_response({"docker": {"containers": []}}) @@ -522,10 +544,16 @@ class TestDockerToolRequests: ) if "StartContainer" in body["query"]: return _graphql_response( - {"docker": {"start": { - "id": resolved_id, "names": ["plex"], - "state": "running", "status": "Up", - }}} + { + "docker": { + "start": { + "id": resolved_id, + "names": ["plex"], + "state": "running", + "status": "Up", + } + } + } ) return _graphql_response({}) @@ -546,17 +574,17 @@ class TestVMToolRequests: @staticmethod def _get_tool(): - return make_tool_fn( - "unraid_mcp.tools.virtualization", "register_vm_tool", 
"unraid_vm" - ) + return make_tool_fn("unraid_mcp.tools.virtualization", "register_vm_tool", "unraid_vm") @respx.mock async def test_list_sends_correct_query(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"vms": {"domains": [ - {"id": "v1", "name": "win10", "state": "running", "uuid": "u1"} - ]}} + { + "vms": { + "domains": [{"id": "v1", "name": "win10", "state": "running", "uuid": "u1"}] + } + } ) ) tool = self._get_tool() @@ -567,9 +595,7 @@ class TestVMToolRequests: @respx.mock async def test_start_sends_mutation_with_id(self) -> None: - route = respx.post(API_URL).mock( - return_value=_graphql_response({"vm": {"start": True}}) - ) + route = respx.post(API_URL).mock(return_value=_graphql_response({"vm": {"start": True}})) tool = self._get_tool() result = await tool(action="start", vm_id="vm-123") body = _extract_request_body(route.calls.last.request) @@ -579,9 +605,7 @@ class TestVMToolRequests: @respx.mock async def test_stop_sends_mutation_with_id(self) -> None: - route = respx.post(API_URL).mock( - return_value=_graphql_response({"vm": {"stop": True}}) - ) + route = respx.post(API_URL).mock(return_value=_graphql_response({"vm": {"stop": True}})) tool = self._get_tool() await tool(action="stop", vm_id="vm-456") body = _extract_request_body(route.calls.last.request) @@ -615,10 +639,14 @@ class TestVMToolRequests: async def test_details_finds_vm_by_name(self) -> None: respx.post(API_URL).mock( return_value=_graphql_response( - {"vms": {"domains": [ - {"id": "v1", "name": "win10", "state": "running", "uuid": "uuid-1"}, - {"id": "v2", "name": "ubuntu", "state": "stopped", "uuid": "uuid-2"}, - ]}} + { + "vms": { + "domains": [ + {"id": "v1", "name": "win10", "state": "running", "uuid": "uuid-1"}, + {"id": "v2", "name": "ubuntu", "state": "stopped", "uuid": "uuid-2"}, + ] + } + } ) ) tool = self._get_tool() @@ -642,9 +670,15 @@ class TestArrayToolRequests: async def test_parity_status_sends_correct_query(self) -> None: route = 
respx.post(API_URL).mock( return_value=_graphql_response( - {"array": {"parityCheckStatus": { - "progress": 50, "speed": "100 MB/s", "errors": 0, - }}} + { + "array": { + "parityCheckStatus": { + "progress": 50, + "speed": "100 MB/s", + "errors": 0, + } + } + } ) ) tool = self._get_tool() @@ -706,9 +740,7 @@ class TestStorageToolRequests: @staticmethod def _get_tool(): - return make_tool_fn( - "unraid_mcp.tools.storage", "register_storage_tool", "unraid_storage" - ) + return make_tool_fn("unraid_mcp.tools.storage", "register_storage_tool", "unraid_storage") @respx.mock async def test_shares_sends_correct_query(self) -> None: @@ -737,10 +769,16 @@ class TestStorageToolRequests: async def test_disk_details_sends_variable(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"disk": { - "id": "d1", "device": "sda", "name": "Disk 1", - "serialNum": "SN123", "size": 1000000, "temperature": 35, - }} + { + "disk": { + "id": "d1", + "device": "sda", + "name": "Disk 1", + "serialNum": "SN123", + "size": 1000000, + "temperature": 35, + } + } ) ) tool = self._get_tool() @@ -766,10 +804,14 @@ class TestStorageToolRequests: async def test_logs_sends_path_and_lines_variables(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"logFile": { - "path": "/var/log/syslog", "content": "log line", - "totalLines": 100, "startLine": 1, - }} + { + "logFile": { + "path": "/var/log/syslog", + "content": "log line", + "totalLines": 100, + "startLine": 1, + } + } ) ) tool = self._get_tool() @@ -787,9 +829,7 @@ class TestStorageToolRequests: @respx.mock async def test_unassigned_sends_correct_query(self) -> None: - route = respx.post(API_URL).mock( - return_value=_graphql_response({"unassignedDevices": []}) - ) + route = respx.post(API_URL).mock(return_value=_graphql_response({"unassignedDevices": []})) tool = self._get_tool() result = await tool(action="unassigned") body = _extract_request_body(route.calls.last.request) @@ -817,9 
+857,13 @@ class TestNotificationsToolRequests: async def test_overview_sends_correct_query(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"notifications": {"overview": { - "unread": {"info": 1, "warning": 0, "alert": 0, "total": 1}, - }}} + { + "notifications": { + "overview": { + "unread": {"info": 1, "warning": 0, "alert": 0, "total": 1}, + } + } + } ) ) tool = self._get_tool() @@ -833,9 +877,7 @@ class TestNotificationsToolRequests: return_value=_graphql_response({"notifications": {"list": []}}) ) tool = self._get_tool() - await tool( - action="list", list_type="ARCHIVE", importance="WARNING", offset=5, limit=10 - ) + await tool(action="list", list_type="ARCHIVE", importance="WARNING", offset=5, limit=10) body = _extract_request_body(route.calls.last.request) assert "ListNotifications" in body["query"] filt = body["variables"]["filter"] @@ -859,9 +901,13 @@ class TestNotificationsToolRequests: async def test_create_sends_input_variables(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"createNotification": { - "id": "n1", "title": "Test", "importance": "INFO", - }} + { + "createNotification": { + "id": "n1", + "title": "Test", + "importance": "INFO", + } + } ) ) tool = self._get_tool() @@ -870,21 +916,19 @@ class TestNotificationsToolRequests: title="Test", subject="Sub", description="Desc", - importance="normal", + importance="info", ) body = _extract_request_body(route.calls.last.request) assert "CreateNotification" in body["query"] inp = body["variables"]["input"] assert inp["title"] == "Test" assert inp["subject"] == "Sub" - assert inp["importance"] == "NORMAL" # uppercased from "normal" + assert inp["importance"] == "INFO" # uppercased from "info" @respx.mock async def test_archive_sends_id_variable(self) -> None: route = respx.post(API_URL).mock( - return_value=_graphql_response( - {"archiveNotification": {"id": "notif-1"}} - ) + return_value=_graphql_response({"archiveNotification": 
{"id": "notif-1"}}) ) tool = self._get_tool() await tool(action="archive", notification_id="notif-1") @@ -901,9 +945,7 @@ class TestNotificationsToolRequests: @respx.mock async def test_delete_sends_id_and_type(self) -> None: route = respx.post(API_URL).mock( - return_value=_graphql_response( - {"deleteNotification": {"unread": {"total": 0}}} - ) + return_value=_graphql_response({"deleteNotification": {"unread": {"total": 0}}}) ) tool = self._get_tool() await tool( @@ -920,9 +962,7 @@ class TestNotificationsToolRequests: @respx.mock async def test_archive_all_sends_importance_when_provided(self) -> None: route = respx.post(API_URL).mock( - return_value=_graphql_response( - {"archiveAll": {"archive": {"total": 1}}} - ) + return_value=_graphql_response({"archiveAll": {"archive": {"total": 1}}}) ) tool = self._get_tool() await tool(action="archive_all", importance="warning") @@ -941,9 +981,7 @@ class TestRCloneToolRequests: @staticmethod def _get_tool(): - return make_tool_fn( - "unraid_mcp.tools.rclone", "register_rclone_tool", "unraid_rclone" - ) + return make_tool_fn("unraid_mcp.tools.rclone", "register_rclone_tool", "unraid_rclone") @respx.mock async def test_list_remotes_sends_correct_query(self) -> None: @@ -962,9 +1000,15 @@ class TestRCloneToolRequests: async def test_config_form_sends_provider_type(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"rclone": {"configForm": { - "id": "form1", "dataSchema": {}, "uiSchema": {}, - }}} + { + "rclone": { + "configForm": { + "id": "form1", + "dataSchema": {}, + "uiSchema": {}, + } + } + } ) ) tool = self._get_tool() @@ -977,9 +1021,15 @@ class TestRCloneToolRequests: async def test_create_remote_sends_input_variables(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"rclone": {"createRCloneRemote": { - "name": "my-s3", "type": "s3", "parameters": {}, - }}} + { + "rclone": { + "createRCloneRemote": { + "name": "my-s3", + "type": "s3", + "parameters": 
{}, + } + } + } ) ) tool = self._get_tool() @@ -1025,18 +1075,20 @@ class TestUsersToolRequests: @staticmethod def _get_tool(): - return make_tool_fn( - "unraid_mcp.tools.users", "register_users_tool", "unraid_users" - ) + return make_tool_fn("unraid_mcp.tools.users", "register_users_tool", "unraid_users") @respx.mock async def test_me_sends_correct_query(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"me": { - "id": "u1", "name": "admin", - "description": "Admin", "roles": ["admin"], - }} + { + "me": { + "id": "u1", + "name": "admin", + "description": "Admin", + "roles": ["admin"], + } + } ) ) tool = self._get_tool() @@ -1061,9 +1113,7 @@ class TestKeysToolRequests: @respx.mock async def test_list_sends_correct_query(self) -> None: route = respx.post(API_URL).mock( - return_value=_graphql_response( - {"apiKeys": [{"id": "k1", "name": "my-key"}]} - ) + return_value=_graphql_response({"apiKeys": [{"id": "k1", "name": "my-key"}]}) ) tool = self._get_tool() result = await tool(action="list") @@ -1088,10 +1138,16 @@ class TestKeysToolRequests: async def test_create_sends_input_variables(self) -> None: route = respx.post(API_URL).mock( return_value=_graphql_response( - {"apiKey": {"create": { - "id": "k2", "name": "new-key", - "key": "secret", "roles": ["read"], - }}} + { + "apiKey": { + "create": { + "id": "k2", + "name": "new-key", + "key": "secret", + "roles": ["read"], + } + } + } ) ) tool = self._get_tool() @@ -1147,15 +1203,11 @@ class TestHealthToolRequests: @staticmethod def _get_tool(): - return make_tool_fn( - "unraid_mcp.tools.health", "register_health_tool", "unraid_health" - ) + return make_tool_fn("unraid_mcp.tools.health", "register_health_tool", "unraid_health") @respx.mock async def test_test_connection_sends_online_query(self) -> None: - route = respx.post(API_URL).mock( - return_value=_graphql_response({"online": True}) - ) + route = respx.post(API_URL).mock(return_value=_graphql_response({"online": True})) tool = 
self._get_tool() result = await tool(action="test_connection") body = _extract_request_body(route.calls.last.request) @@ -1166,21 +1218,23 @@ class TestHealthToolRequests: @respx.mock async def test_check_sends_comprehensive_query(self) -> None: route = respx.post(API_URL).mock( - return_value=_graphql_response({ - "info": { - "machineId": "m1", - "time": 1234567890, - "versions": {"unraid": "7.0"}, - "os": {"uptime": 86400}, - }, - "array": {"state": "STARTED"}, - "notifications": { - "overview": {"unread": {"alert": 0, "warning": 1, "total": 3}}, - }, - "docker": { - "containers": [{"id": "c1", "state": "running", "status": "Up"}], - }, - }) + return_value=_graphql_response( + { + "info": { + "machineId": "m1", + "time": 1234567890, + "versions": {"unraid": "7.0"}, + "os": {"uptime": 86400}, + }, + "array": {"state": "STARTED"}, + "notifications": { + "overview": {"unread": {"alert": 0, "warning": 1, "total": 3}}, + }, + "docker": { + "containers": [{"id": "c1", "state": "running", "status": "Up"}], + }, + } + ) ) tool = self._get_tool() result = await tool(action="check") @@ -1191,9 +1245,7 @@ class TestHealthToolRequests: @respx.mock async def test_test_connection_measures_latency(self) -> None: - respx.post(API_URL).mock( - return_value=_graphql_response({"online": True}) - ) + respx.post(API_URL).mock(return_value=_graphql_response({"online": True})) tool = self._get_tool() result = await tool(action="test_connection") assert "latency_ms" in result @@ -1202,18 +1254,21 @@ class TestHealthToolRequests: @respx.mock async def test_check_reports_warning_on_alerts(self) -> None: respx.post(API_URL).mock( - return_value=_graphql_response({ - "info": { - "machineId": "m1", "time": 0, - "versions": {"unraid": "7.0"}, - "os": {"uptime": 0}, - }, - "array": {"state": "STARTED"}, - "notifications": { - "overview": {"unread": {"alert": 3, "warning": 0, "total": 5}}, - }, - "docker": {"containers": []}, - }) + return_value=_graphql_response( + { + "info": { + "machineId": 
"m1", + "time": 0, + "versions": {"unraid": "7.0"}, + "os": {"uptime": 0}, + }, + "array": {"state": "STARTED"}, + "notifications": { + "overview": {"unread": {"alert": 3, "warning": 0, "total": 5}}, + }, + "docker": {"containers": []}, + } + ) ) tool = self._get_tool() result = await tool(action="check") @@ -1252,24 +1307,16 @@ class TestCrossCuttingConcerns: @respx.mock async def test_tool_error_from_http_layer_propagates(self) -> None: """When an HTTP error occurs, the ToolError bubbles up through the tool.""" - respx.post(API_URL).mock( - return_value=httpx.Response(500, text="Server Error") - ) - tool = make_tool_fn( - "unraid_mcp.tools.info", "register_info_tool", "unraid_info" - ) + respx.post(API_URL).mock(return_value=httpx.Response(500, text="Server Error")) + tool = make_tool_fn("unraid_mcp.tools.info", "register_info_tool", "unraid_info") with pytest.raises(ToolError, match="Unraid API returned HTTP 500"): await tool(action="online") @respx.mock async def test_network_error_propagates_through_tool(self) -> None: """When a network error occurs, the ToolError bubbles up through the tool.""" - respx.post(API_URL).mock( - side_effect=httpx.ConnectError("Connection refused") - ) - tool = make_tool_fn( - "unraid_mcp.tools.info", "register_info_tool", "unraid_info" - ) + respx.post(API_URL).mock(side_effect=httpx.ConnectError("Connection refused")) + tool = make_tool_fn("unraid_mcp.tools.info", "register_info_tool", "unraid_info") with pytest.raises(ToolError, match="Network error connecting to Unraid API"): await tool(action="online") @@ -1277,12 +1324,8 @@ class TestCrossCuttingConcerns: async def test_graphql_error_propagates_through_tool(self) -> None: """When a GraphQL error occurs, the ToolError bubbles up through the tool.""" respx.post(API_URL).mock( - return_value=_graphql_response( - errors=[{"message": "Permission denied"}] - ) - ) - tool = make_tool_fn( - "unraid_mcp.tools.info", "register_info_tool", "unraid_info" + 
return_value=_graphql_response(errors=[{"message": "Permission denied"}]) ) + tool = make_tool_fn("unraid_mcp.tools.info", "register_info_tool", "unraid_info") with pytest.raises(ToolError, match="Permission denied"): await tool(action="online") diff --git a/tests/safety/test_destructive_guards.py b/tests/safety/test_destructive_guards.py index 4432bfe..09fe49b 100644 --- a/tests/safety/test_destructive_guards.py +++ b/tests/safety/test_destructive_guards.py @@ -88,8 +88,7 @@ class TestDestructiveActionRegistries: """Each tool's DESTRUCTIVE_ACTIONS must exactly match the audited set.""" info = KNOWN_DESTRUCTIVE[tool_key] assert info["runtime_set"] == info["actions"], ( - f"{tool_key}: DESTRUCTIVE_ACTIONS is {info['runtime_set']}, " - f"expected {info['actions']}" + f"{tool_key}: DESTRUCTIVE_ACTIONS is {info['runtime_set']}, expected {info['actions']}" ) @pytest.mark.parametrize("tool_key", list(KNOWN_DESTRUCTIVE.keys())) @@ -131,7 +130,8 @@ class TestDestructiveActionRegistries: missing.extend( f"{tool_key}/{action_name}" for action_name in mutations - if ("delete" in action_name or "remove" in action_name) and action_name not in destructive + if ("delete" in action_name or "remove" in action_name) + and action_name not in destructive ) assert not missing, ( f"Mutations with 'delete'/'remove' not in DESTRUCTIVE_ACTIONS: {missing}" @@ -198,7 +198,11 @@ def _mock_keys_graphql() -> Generator[AsyncMock, None, None]: _TOOL_REGISTRY = { "docker": ("unraid_mcp.tools.docker", "register_docker_tool", "unraid_docker"), "vm": ("unraid_mcp.tools.virtualization", "register_vm_tool", "unraid_vm"), - "notifications": ("unraid_mcp.tools.notifications", "register_notifications_tool", "unraid_notifications"), + "notifications": ( + "unraid_mcp.tools.notifications", + "register_notifications_tool", + "unraid_notifications", + ), "rclone": ("unraid_mcp.tools.rclone", "register_rclone_tool", "unraid_rclone"), "keys": ("unraid_mcp.tools.keys", "register_keys_tool", "unraid_keys"), } @@ 
-275,7 +279,11 @@ class TestConfirmAllowsExecution: async def test_docker_update_all_with_confirm(self, _mock_docker_graphql: AsyncMock) -> None: _mock_docker_graphql.return_value = { - "docker": {"updateAllContainers": [{"id": "c1", "names": ["app"], "state": "running", "status": "Up"}]} + "docker": { + "updateAllContainers": [ + {"id": "c1", "names": ["app"], "state": "running", "status": "Up"} + ] + } } tool_fn = make_tool_fn("unraid_mcp.tools.docker", "register_docker_tool", "unraid_docker") result = await tool_fn(action="update_all", confirm=True) @@ -305,7 +313,12 @@ class TestConfirmAllowsExecution: assert result["success"] is True async def test_notifications_delete_with_confirm(self, _mock_notif_graphql: AsyncMock) -> None: - _mock_notif_graphql.return_value = {"deleteNotification": {"unread": {"total": 0}}} + _mock_notif_graphql.return_value = { + "deleteNotification": { + "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + } + } tool_fn = make_tool_fn( "unraid_mcp.tools.notifications", "register_notifications_tool", "unraid_notifications" ) @@ -317,8 +330,15 @@ class TestConfirmAllowsExecution: ) assert result["success"] is True - async def test_notifications_delete_archived_with_confirm(self, _mock_notif_graphql: AsyncMock) -> None: - _mock_notif_graphql.return_value = {"deleteArchivedNotifications": {"archive": {"total": 0}}} + async def test_notifications_delete_archived_with_confirm( + self, _mock_notif_graphql: AsyncMock + ) -> None: + _mock_notif_graphql.return_value = { + "deleteArchivedNotifications": { + "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + } + } tool_fn = make_tool_fn( "unraid_mcp.tools.notifications", "register_notifications_tool", "unraid_notifications" ) diff --git a/tests/schema/test_query_validation.py b/tests/schema/test_query_validation.py index c72aad6..c08ba40 100644 --- 
a/tests/schema/test_query_validation.py +++ b/tests/schema/test_query_validation.py @@ -153,10 +153,25 @@ class TestInfoQueries: from unraid_mcp.tools.info import QUERIES expected_actions = { - "overview", "array", "network", "registration", "connect", - "variables", "metrics", "services", "display", "config", - "online", "owner", "settings", "server", "servers", - "flash", "ups_devices", "ups_device", "ups_config", + "overview", + "array", + "network", + "registration", + "connect", + "variables", + "metrics", + "services", + "display", + "config", + "online", + "owner", + "settings", + "server", + "servers", + "flash", + "ups_devices", + "ups_device", + "ups_config", } assert set(QUERIES.keys()) == expected_actions @@ -314,8 +329,13 @@ class TestDockerQueries: from unraid_mcp.tools.docker import QUERIES expected = { - "list", "details", "logs", "networks", - "network_details", "port_conflicts", "check_updates", + "list", + "details", + "logs", + "networks", + "network_details", + "port_conflicts", + "check_updates", } assert set(QUERIES.keys()) == expected @@ -520,7 +540,19 @@ class TestNotificationMutations: def test_all_notification_mutations_covered(self, schema: GraphQLSchema) -> None: from unraid_mcp.tools.notifications import MUTATIONS - expected = {"create", "archive", "unread", "delete", "delete_archived", "archive_all"} + expected = { + "create", + "archive", + "unread", + "delete", + "delete_archived", + "archive_all", + "archive_many", + "create_unique", + "unarchive_many", + "unarchive_all", + "recalculate", + } assert set(MUTATIONS.keys()) == expected @@ -713,8 +745,7 @@ class TestSchemaCompleteness: failures.append(f"{tool_name}/MUTATIONS/{action}: {errors[0]}") assert not failures, ( - f"{len(failures)} of {total} operations failed validation:\n" - + "\n".join(failures) + f"{len(failures)} of {total} operations failed validation:\n" + "\n".join(failures) ) def test_schema_has_query_type(self, schema: GraphQLSchema) -> None: diff --git 
a/tests/test_array.py b/tests/test_array.py index 9837022..d7d786f 100644 --- a/tests/test_array.py +++ b/tests/test_array.py @@ -43,6 +43,7 @@ class TestArrayValidation: tool_fn = _make_tool() with pytest.raises(ToolError, match="correct is required"): await tool_fn(action="parity_start") + _mock_graphql.assert_not_called() class TestArrayActions: @@ -53,6 +54,8 @@ class TestArrayActions: assert result["success"] is True assert result["action"] == "parity_start" _mock_graphql.assert_called_once() + call_args = _mock_graphql.call_args + assert call_args[0][1] == {"correct": False} async def test_parity_start_with_correct(self, _mock_graphql: AsyncMock) -> None: _mock_graphql.return_value = {"parityCheck": {"start": True}} diff --git a/tests/test_notifications.py b/tests/test_notifications.py index 890d5fd..1d3cf2c 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -90,7 +90,7 @@ class TestNotificationsActions: title="Test", subject="Test Subject", description="Test Desc", - importance="normal", + importance="info", ) assert result["success"] is True @@ -101,7 +101,12 @@ class TestNotificationsActions: assert result["success"] is True async def test_delete_with_confirm(self, _mock_graphql: AsyncMock) -> None: - _mock_graphql.return_value = {"deleteNotification": {"unread": {"total": 0}}} + _mock_graphql.return_value = { + "deleteNotification": { + "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + } + } tool_fn = _make_tool() result = await tool_fn( action="delete", @@ -112,7 +117,12 @@ class TestNotificationsActions: assert result["success"] is True async def test_archive_all(self, _mock_graphql: AsyncMock) -> None: - _mock_graphql.return_value = {"archiveAll": {"archive": {"total": 1}}} + _mock_graphql.return_value = { + "archiveAll": { + "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 1}, + 
} + } tool_fn = _make_tool() result = await tool_fn(action="archive_all") assert result["success"] is True @@ -138,7 +148,12 @@ class TestNotificationsActions: assert filter_var["offset"] == 5 async def test_delete_archived(self, _mock_graphql: AsyncMock) -> None: - _mock_graphql.return_value = {"deleteArchivedNotifications": {"archive": {"total": 0}}} + _mock_graphql.return_value = { + "deleteArchivedNotifications": { + "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + } + } tool_fn = _make_tool() result = await tool_fn(action="delete_archived", confirm=True) assert result["success"] is True @@ -165,8 +180,8 @@ class TestNotificationsCreateValidation: importance="invalid", ) - async def test_info_importance_rejected(self, _mock_graphql: AsyncMock) -> None: - """INFO is listed in old docstring examples but rejected by the validator.""" + async def test_normal_importance_rejected(self, _mock_graphql: AsyncMock) -> None: + """NORMAL is not a valid GraphQL NotificationImportance value (INFO/WARNING/ALERT are).""" tool_fn = _make_tool() with pytest.raises(ToolError, match="importance must be one of"): await tool_fn( @@ -174,7 +189,7 @@ class TestNotificationsCreateValidation: title="T", subject="S", description="D", - importance="info", + importance="normal", ) async def test_alert_importance_accepted(self, _mock_graphql: AsyncMock) -> None: @@ -193,7 +208,7 @@ class TestNotificationsCreateValidation: title="x" * 201, subject="S", description="D", - importance="normal", + importance="info", ) async def test_subject_too_long_rejected(self, _mock_graphql: AsyncMock) -> None: @@ -204,7 +219,7 @@ class TestNotificationsCreateValidation: title="T", subject="x" * 501, description="D", - importance="normal", + importance="info", ) async def test_description_too_long_rejected(self, _mock_graphql: AsyncMock) -> None: @@ -215,17 +230,118 @@ class TestNotificationsCreateValidation: title="T", 
subject="S", description="x" * 2001, - importance="normal", + importance="info", ) async def test_title_at_max_accepted(self, _mock_graphql: AsyncMock) -> None: - _mock_graphql.return_value = {"createNotification": {"id": "n:1", "importance": "NORMAL"}} + _mock_graphql.return_value = {"createNotification": {"id": "n:1", "importance": "INFO"}} tool_fn = _make_tool() result = await tool_fn( action="create", title="x" * 200, subject="S", description="D", - importance="normal", + importance="info", ) assert result["success"] is True + + +class TestNewNotificationMutations: + async def test_archive_many_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "archiveNotifications": { + "unread": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + "archive": {"info": 2, "warning": 0, "alert": 0, "total": 2}, + } + } + tool_fn = _make_tool() + result = await tool_fn(action="archive_many", notification_ids=["n:1", "n:2"]) + assert result["success"] is True + call_args = _mock_graphql.call_args + assert call_args[0][1] == {"ids": ["n:1", "n:2"]} + + async def test_archive_many_requires_ids(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="notification_ids"): + await tool_fn(action="archive_many") + + async def test_create_unique_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "notifyIfUnique": {"id": "n:1", "title": "Test", "importance": "INFO"} + } + tool_fn = _make_tool() + result = await tool_fn( + action="create_unique", + title="Test", + subject="Subj", + description="Desc", + importance="info", + ) + assert result["success"] is True + + async def test_create_unique_returns_none_when_duplicate( + self, _mock_graphql: AsyncMock + ) -> None: + _mock_graphql.return_value = {"notifyIfUnique": None} + tool_fn = _make_tool() + result = await tool_fn( + action="create_unique", + title="T", + subject="S", + description="D", + importance="info", + ) + assert 
result["success"] is True + assert result["duplicate"] is True + + async def test_create_unique_requires_fields(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="requires title"): + await tool_fn(action="create_unique") + + async def test_unarchive_many_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "unarchiveNotifications": { + "unread": {"info": 2, "warning": 0, "alert": 0, "total": 2}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + } + } + tool_fn = _make_tool() + result = await tool_fn(action="unarchive_many", notification_ids=["n:1", "n:2"]) + assert result["success"] is True + + async def test_unarchive_many_requires_ids(self, _mock_graphql: AsyncMock) -> None: + tool_fn = _make_tool() + with pytest.raises(ToolError, match="notification_ids"): + await tool_fn(action="unarchive_many") + + async def test_unarchive_all_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "unarchiveAll": { + "unread": {"info": 5, "warning": 1, "alert": 0, "total": 6}, + "archive": {"info": 0, "warning": 0, "alert": 0, "total": 0}, + } + } + tool_fn = _make_tool() + result = await tool_fn(action="unarchive_all") + assert result["success"] is True + + async def test_unarchive_all_with_importance(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "unarchiveAll": {"unread": {"total": 1}, "archive": {"total": 0}} + } + tool_fn = _make_tool() + await tool_fn(action="unarchive_all", importance="WARNING") + call_args = _mock_graphql.call_args + assert call_args[0][1] == {"importance": "WARNING"} + + async def test_recalculate_success(self, _mock_graphql: AsyncMock) -> None: + _mock_graphql.return_value = { + "recalculateOverview": { + "unread": {"info": 3, "warning": 1, "alert": 0, "total": 4}, + "archive": {"info": 10, "warning": 0, "alert": 0, "total": 10}, + } + } + tool_fn = _make_tool() + result = await 
tool_fn(action="recalculate") + assert result["success"] is True diff --git a/unraid_mcp/config/logging.py b/unraid_mcp/config/logging.py index f0193d8..e291ba4 100644 --- a/unraid_mcp/config/logging.py +++ b/unraid_mcp/config/logging.py @@ -53,21 +53,22 @@ class OverwriteFileHandler(logging.FileHandler): ): try: base_path = Path(self.baseFilename) - if base_path.exists(): - file_size = base_path.stat().st_size - if file_size >= self.max_bytes: - # Close current stream - if self.stream: - self.stream.close() - - # Remove the old file and start fresh - if base_path.exists(): - base_path.unlink() - - # Reopen with truncate mode + file_size = base_path.stat().st_size if base_path.exists() else 0 + if file_size >= self.max_bytes: + old_stream = self.stream + self.stream = None + try: + old_stream.close() + base_path.unlink(missing_ok=True) self.stream = self._open() + except OSError: + # Recovery: attempt to reopen even if unlink failed + try: + self.stream = self._open() + except OSError: + self.stream = old_stream # Last resort: restore original - # Log a marker that the file was reset + if self.stream is not None: reset_record = logging.LogRecord( name="UnraidMCPServer.Logging", level=logging.INFO, @@ -184,27 +185,8 @@ def configure_fastmcp_logger_with_rich() -> logging.Logger | None: fastmcp_logger.setLevel(numeric_log_level) - # Also configure the root logger to catch any other logs - root_logger = logging.getLogger() - root_logger.handlers.clear() - root_logger.propagate = False - - # Rich Console Handler for root logger - root_console_handler = RichHandler( - console=console, - show_time=True, - show_level=True, - show_path=False, - rich_tracebacks=True, - tracebacks_show_locals=False, - markup=True, - ) - root_console_handler.setLevel(numeric_log_level) - root_logger.addHandler(root_console_handler) - - # Reuse the shared file handler for root logger - root_logger.addHandler(_shared_file_handler) - root_logger.setLevel(numeric_log_level) + # Set root logger 
level to avoid suppressing library warnings entirely + logging.getLogger().setLevel(numeric_log_level) return fastmcp_logger diff --git a/unraid_mcp/config/settings.py b/unraid_mcp/config/settings.py index 1478199..90aa05d 100644 --- a/unraid_mcp/config/settings.py +++ b/unraid_mcp/config/settings.py @@ -36,8 +36,27 @@ for dotenv_path in dotenv_paths: UNRAID_API_URL = os.getenv("UNRAID_API_URL") UNRAID_API_KEY = os.getenv("UNRAID_API_KEY") + # Server Configuration -UNRAID_MCP_PORT = int(os.getenv("UNRAID_MCP_PORT", "6970")) +def _parse_port(env_var: str, default: int) -> int: + """Parse a port number from environment variable with validation.""" + raw = os.getenv(env_var, str(default)) + try: + port = int(raw) + except ValueError: + import sys + + print(f"FATAL: {env_var}={raw!r} is not a valid integer port number", file=sys.stderr) + sys.exit(1) + if not (1 <= port <= 65535): + import sys + + print(f"FATAL: {env_var}={port} outside valid port range 1-65535", file=sys.stderr) + sys.exit(1) + return port + + +UNRAID_MCP_PORT = _parse_port("UNRAID_MCP_PORT", 6970) UNRAID_MCP_HOST = os.getenv("UNRAID_MCP_HOST", "0.0.0.0") # noqa: S104 — intentional for Docker UNRAID_MCP_TRANSPORT = os.getenv("UNRAID_MCP_TRANSPORT", "streamable-http").lower() @@ -58,7 +77,7 @@ IS_DOCKER = Path("/.dockerenv").exists() LOGS_DIR = Path("/app/logs") if IS_DOCKER else PROJECT_ROOT / "logs" LOG_FILE_PATH = LOGS_DIR / LOG_FILE_NAME -# Ensure logs directory exists; if creation fails, fall back to /tmp. +# Ensure logs directory exists; if creation fails, fall back to PROJECT_ROOT / ".cache" / "logs". try: LOGS_DIR.mkdir(parents=True, exist_ok=True) except OSError: @@ -97,9 +116,11 @@ def get_config_summary() -> dict[str, Any]: """ is_valid, missing = validate_required_config() + from ..core.utils import safe_display_url + return { "api_url_configured": bool(UNRAID_API_URL), - "api_url_preview": UNRAID_API_URL[:20] + "..." 
if UNRAID_API_URL else None, + "api_url_preview": safe_display_url(UNRAID_API_URL) if UNRAID_API_URL else None, "api_key_configured": bool(UNRAID_API_KEY), "server_host": UNRAID_MCP_HOST, "server_port": UNRAID_MCP_PORT, @@ -110,5 +131,7 @@ def get_config_summary() -> dict[str, Any]: "config_valid": is_valid, "missing_config": missing if not is_valid else None, } + + # Re-export application version from a single source of truth. VERSION = APP_VERSION diff --git a/unraid_mcp/core/client.py b/unraid_mcp/core/client.py index eb452d6..10bdb92 100644 --- a/unraid_mcp/core/client.py +++ b/unraid_mcp/core/client.py @@ -51,9 +51,7 @@ def _is_sensitive_key(key: str) -> bool: def redact_sensitive(obj: Any) -> Any: """Recursively redact sensitive values from nested dicts/lists.""" if isinstance(obj, dict): - return { - k: ("***" if _is_sensitive_key(k) else redact_sensitive(v)) for k, v in obj.items() - } + return {k: ("***" if _is_sensitive_key(k) else redact_sensitive(v)) for k, v in obj.items()} if isinstance(obj, list): return [redact_sensitive(item) for item in obj] return obj @@ -149,10 +147,16 @@ class _QueryCache: Keyed by a hash of (query, variables). Entries expire after _CACHE_TTL_SECONDS. Only caches responses for queries whose operation name is in _CACHEABLE_QUERY_PREFIXES. Mutation requests always bypass the cache. + + Thread-safe via asyncio.Lock. Bounded to _MAX_ENTRIES with FIFO eviction (oldest + expiry timestamp evicted first when the store is full). 
""" + _MAX_ENTRIES: Final[int] = 256 + def __init__(self) -> None: self._store: dict[str, tuple[float, dict[str, Any]]] = {} + self._lock: Final[asyncio.Lock] = asyncio.Lock() @staticmethod def _cache_key(query: str, variables: dict[str, Any] | None) -> str: @@ -170,26 +174,32 @@ class _QueryCache: return False return match.group(1) in _CACHEABLE_QUERY_PREFIXES - def get(self, query: str, variables: dict[str, Any] | None) -> dict[str, Any] | None: + async def get(self, query: str, variables: dict[str, Any] | None) -> dict[str, Any] | None: """Return cached result if present and not expired, else None.""" - key = self._cache_key(query, variables) - entry = self._store.get(key) - if entry is None: - return None - expires_at, data = entry - if time.monotonic() > expires_at: - del self._store[key] - return None - return data + async with self._lock: + key = self._cache_key(query, variables) + entry = self._store.get(key) + if entry is None: + return None + expires_at, data = entry + if time.monotonic() > expires_at: + del self._store[key] + return None + return data - def put(self, query: str, variables: dict[str, Any] | None, data: dict[str, Any]) -> None: - """Store a query result with TTL expiry.""" - key = self._cache_key(query, variables) - self._store[key] = (time.monotonic() + _CACHE_TTL_SECONDS, data) + async def put(self, query: str, variables: dict[str, Any] | None, data: dict[str, Any]) -> None: + """Store a query result with TTL expiry, evicting oldest entry if at capacity.""" + async with self._lock: + if len(self._store) >= self._MAX_ENTRIES: + oldest_key = min(self._store, key=lambda k: self._store[k][0]) + del self._store[oldest_key] + key = self._cache_key(query, variables) + self._store[key] = (time.monotonic() + _CACHE_TTL_SECONDS, data) - def invalidate_all(self) -> None: + async def invalidate_all(self) -> None: """Clear the entire cache (called after mutations).""" - self._store.clear() + async with self._lock: + self._store.clear() _query_cache = 
_QueryCache() @@ -310,10 +320,10 @@ async def make_graphql_request( if not UNRAID_API_KEY: raise ToolError("UNRAID_API_KEY not configured") - # Check TTL cache for stable read-only queries + # Check TTL cache — short-circuits rate limiter on hits is_mutation = query.lstrip().startswith("mutation") if not is_mutation and _query_cache.is_cacheable(query): - cached = _query_cache.get(query, variables) + cached = await _query_cache.get(query, variables) if cached is not None: logger.debug("Returning cached response for query") return cached @@ -399,9 +409,9 @@ async def make_graphql_request( # Invalidate cache on mutations; cache eligible query results if is_mutation: - _query_cache.invalidate_all() + await _query_cache.invalidate_all() elif _query_cache.is_cacheable(query): - _query_cache.put(query, variables, result) + await _query_cache.put(query, variables, result) return result diff --git a/unraid_mcp/main.py b/unraid_mcp/main.py index a2957d5..b7c8859 100644 --- a/unraid_mcp/main.py +++ b/unraid_mcp/main.py @@ -11,12 +11,19 @@ import sys async def shutdown_cleanup() -> None: """Cleanup resources on server shutdown.""" + try: + from .subscriptions.manager import subscription_manager + + await subscription_manager.stop_all() + except Exception as e: + print(f"Error stopping subscriptions during cleanup: {e}", file=sys.stderr) + try: from .core.client import close_http_client await close_http_client() except Exception as e: - print(f"Error during cleanup: {e}") + print(f"Error during cleanup: {e}", file=sys.stderr) def _run_shutdown_cleanup() -> None: diff --git a/unraid_mcp/server.py b/unraid_mcp/server.py index 91794af..1c13a88 100644 --- a/unraid_mcp/server.py +++ b/unraid_mcp/server.py @@ -10,8 +10,6 @@ from fastmcp import FastMCP from .config.logging import logger from .config.settings import ( - UNRAID_API_KEY, - UNRAID_API_URL, UNRAID_MCP_HOST, UNRAID_MCP_PORT, UNRAID_MCP_TRANSPORT, @@ -86,20 +84,10 @@ def run_server() -> None: ) sys.exit(1) - # Log 
configuration - if UNRAID_API_URL: - logger.info(f"UNRAID_API_URL loaded: {UNRAID_API_URL[:20]}...") - else: - logger.warning("UNRAID_API_URL not found in environment or .env file.") + # Log configuration (delegated to shared function) + from .config.logging import log_configuration_status - if UNRAID_API_KEY: - logger.info("UNRAID_API_KEY loaded: ****") - else: - logger.warning("UNRAID_API_KEY not found in environment or .env file.") - - logger.info(f"UNRAID_MCP_PORT set to: {UNRAID_MCP_PORT}") - logger.info(f"UNRAID_MCP_HOST set to: {UNRAID_MCP_HOST}") - logger.info(f"UNRAID_MCP_TRANSPORT set to: {UNRAID_MCP_TRANSPORT}") + log_configuration_status(logger) if UNRAID_VERIFY_SSL is False: logger.warning( diff --git a/unraid_mcp/subscriptions/diagnostics.py b/unraid_mcp/subscriptions/diagnostics.py index b9dbcad..5862455 100644 --- a/unraid_mcp/subscriptions/diagnostics.py +++ b/unraid_mcp/subscriptions/diagnostics.py @@ -22,7 +22,7 @@ from ..core.exceptions import ToolError from ..core.utils import safe_display_url from .manager import subscription_manager from .resources import ensure_subscriptions_started -from .utils import build_ws_ssl_context, build_ws_url +from .utils import _analyze_subscription_status, build_ws_ssl_context, build_ws_url _ALLOWED_SUBSCRIPTION_NAMES = frozenset( @@ -187,8 +187,10 @@ def register_diagnostic_tools(mcp: FastMCP) -> None: # Get comprehensive status status = await subscription_manager.get_subscription_status() - # Initialize connection issues list with proper type - connection_issues: list[dict[str, Any]] = [] + # Analyze connection issues and error counts via the shared helper. + # This ensures "invalid_uri" and all other error states are counted + # consistently with the health tool's _diagnose_subscriptions path. 
+ error_count, connection_issues = _analyze_subscription_status(status) # Add environment info with explicit typing diagnostic_info: dict[str, Any] = { @@ -210,7 +212,7 @@ def register_diagnostic_tools(mcp: FastMCP) -> None: ), "active_count": len(subscription_manager.active_subscriptions), "with_data": len(subscription_manager.resource_data), - "in_error_state": 0, + "in_error_state": error_count, "connection_issues": connection_issues, }, } @@ -219,23 +221,6 @@ def register_diagnostic_tools(mcp: FastMCP) -> None: with contextlib.suppress(ValueError): diagnostic_info["environment"]["websocket_url"] = build_ws_url() - # Analyze issues - for sub_name, sub_status in status.items(): - runtime = sub_status.get("runtime", {}) - connection_state = runtime.get("connection_state", "unknown") - - if connection_state in ["error", "auth_failed", "timeout", "max_retries_exceeded"]: - diagnostic_info["summary"]["in_error_state"] += 1 - - if runtime.get("last_error"): - connection_issues.append( - { - "subscription": sub_name, - "state": connection_state, - "error": runtime["last_error"], - } - ) - # Add troubleshooting recommendations recommendations: list[str] = [] diff --git a/unraid_mcp/subscriptions/manager.py b/unraid_mcp/subscriptions/manager.py index c58cd5a..dda682c 100644 --- a/unraid_mcp/subscriptions/manager.py +++ b/unraid_mcp/subscriptions/manager.py @@ -45,7 +45,7 @@ def _cap_log_content(data: dict[str, Any]) -> dict[str, Any]: elif ( key == "content" and isinstance(value, str) - and len(value.encode("utf-8", errors="replace")) > _MAX_RESOURCE_DATA_BYTES + and len(value) > _MAX_RESOURCE_DATA_BYTES # fast pre-check on char count ): lines = value.splitlines() original_line_count = len(lines) @@ -54,19 +54,15 @@ def _cap_log_content(data: dict[str, Any]) -> dict[str, Any]: if len(lines) > _MAX_RESOURCE_DATA_LINES: lines = lines[-_MAX_RESOURCE_DATA_LINES:] - # Enforce byte cap while preserving whole-line boundaries where possible. 
truncated = "\n".join(lines) - truncated_bytes = truncated.encode("utf-8", errors="replace") - while len(lines) > 1 and len(truncated_bytes) > _MAX_RESOURCE_DATA_BYTES: - lines = lines[1:] - truncated = "\n".join(lines) - truncated_bytes = truncated.encode("utf-8", errors="replace") - - # Last resort: if a single line still exceeds cap, hard-cap bytes. - if len(truncated_bytes) > _MAX_RESOURCE_DATA_BYTES: - truncated = truncated_bytes[-_MAX_RESOURCE_DATA_BYTES :].decode( - "utf-8", errors="ignore" - ) + # Encode once and slice bytes instead of O(n²) line-trim loop + encoded = truncated.encode("utf-8", errors="replace") + if len(encoded) > _MAX_RESOURCE_DATA_BYTES: + truncated = encoded[-_MAX_RESOURCE_DATA_BYTES:].decode("utf-8", errors="ignore") + # Strip partial first line that may have been cut mid-character + nl_pos = truncated.find("\n") + if nl_pos != -1: + truncated = truncated[nl_pos + 1 :] logger.warning( f"[RESOURCE] Capped log content from {original_line_count} to " @@ -202,6 +198,16 @@ class SubscriptionManager: else: logger.warning(f"[SUBSCRIPTION:{subscription_name}] No active subscription to stop") + async def stop_all(self) -> None: + """Stop all active subscriptions (called during server shutdown).""" + subscription_names = list(self.active_subscriptions.keys()) + for name in subscription_names: + try: + await self.stop_subscription(name) + except Exception as e: + logger.error(f"[SHUTDOWN] Error stopping subscription '{name}': {e}", exc_info=True) + logger.info(f"[SHUTDOWN] Stopped {len(subscription_names)} subscription(s)") + async def _subscription_loop( self, subscription_name: str, query: str, variables: dict[str, Any] | None ) -> None: @@ -512,9 +518,11 @@ class SubscriptionManager: f"({connected_duration:.0f}s < {_STABLE_CONNECTION_SECONDS}s), " f"keeping retry counter at {self.reconnect_attempts.get(subscription_name, 0)}" ) - - # Calculate backoff delay - retry_delay = min(retry_delay * 1.5, max_retry_delay) + # Only escalate backoff when 
connection was NOT stable + retry_delay = min(retry_delay * 1.5, max_retry_delay) + else: + # No connection was established — escalate backoff + retry_delay = min(retry_delay * 1.5, max_retry_delay) logger.info( f"[WEBSOCKET:{subscription_name}] Reconnecting in {retry_delay:.1f} seconds..." ) diff --git a/unraid_mcp/subscriptions/resources.py b/unraid_mcp/subscriptions/resources.py index 850ac1c..cd780a5 100644 --- a/unraid_mcp/subscriptions/resources.py +++ b/unraid_mcp/subscriptions/resources.py @@ -4,8 +4,10 @@ This module defines MCP resources that bridge between the subscription manager and the MCP protocol, providing fallback queries when subscription data is unavailable. """ +import asyncio import json import os +from typing import Final import anyio from fastmcp import FastMCP @@ -16,22 +18,29 @@ from .manager import subscription_manager # Global flag to track subscription startup _subscriptions_started = False +_startup_lock: Final[asyncio.Lock] = asyncio.Lock() async def ensure_subscriptions_started() -> None: """Ensure subscriptions are started, called from async context.""" global _subscriptions_started + # Fast-path: skip lock if already started if _subscriptions_started: return - logger.info("[STARTUP] First async operation detected, starting subscriptions...") - try: - await autostart_subscriptions() - _subscriptions_started = True - logger.info("[STARTUP] Subscriptions started successfully") - except Exception as e: - logger.error(f"[STARTUP] Failed to start subscriptions: {e}", exc_info=True) + # Slow-path: acquire lock for initialization (double-checked locking) + async with _startup_lock: + if _subscriptions_started: + return + + logger.info("[STARTUP] First async operation detected, starting subscriptions...") + try: + await autostart_subscriptions() + _subscriptions_started = True + logger.info("[STARTUP] Subscriptions started successfully") + except Exception as e: + logger.error(f"[STARTUP] Failed to start subscriptions: {e}", exc_info=True) 
async def autostart_subscriptions() -> None: diff --git a/unraid_mcp/subscriptions/utils.py b/unraid_mcp/subscriptions/utils.py index 45c3634..83c1c4d 100644 --- a/unraid_mcp/subscriptions/utils.py +++ b/unraid_mcp/subscriptions/utils.py @@ -1,6 +1,7 @@ """Shared utilities for the subscription system.""" import ssl as _ssl +from typing import Any from ..config.settings import UNRAID_API_URL, UNRAID_VERIFY_SSL @@ -52,3 +53,37 @@ def build_ws_ssl_context(ws_url: str) -> _ssl.SSLContext | None: ctx.check_hostname = False ctx.verify_mode = _ssl.CERT_NONE return ctx + + +def _analyze_subscription_status( + status: dict[str, Any], +) -> tuple[int, list[dict[str, Any]]]: + """Analyze subscription status dict, returning error count and connection issues. + + This is the canonical, shared implementation used by both the health tool + and the subscription diagnostics tool. + + Args: + status: Dict of subscription name -> status info from get_subscription_status(). + + Returns: + Tuple of (error_count, connection_issues_list). + """ + error_count = 0 + connection_issues: list[dict[str, Any]] = [] + + for sub_name, sub_status in status.items(): + runtime = sub_status.get("runtime", {}) + conn_state = runtime.get("connection_state", "unknown") + if conn_state in ("error", "auth_failed", "timeout", "max_retries_exceeded", "invalid_uri"): + error_count += 1 + if runtime.get("last_error"): + connection_issues.append( + { + "subscription": sub_name, + "state": conn_state, + "error": runtime["last_error"], + } + ) + + return error_count, connection_issues diff --git a/unraid_mcp/tools/__init__.py b/unraid_mcp/tools/__init__.py index c863e40..3216355 100644 --- a/unraid_mcp/tools/__init__.py +++ b/unraid_mcp/tools/__init__.py @@ -1,14 +1,14 @@ """MCP tools organized by functional domain. 
-10 consolidated tools with ~90 actions total: +10 consolidated tools with 81 actions total: unraid_info - System information queries (19 actions) - unraid_array - Array operations and power management (12 actions) + unraid_array - Array operations and parity management (5 actions) unraid_storage - Storage, disks, and logs (6 actions) unraid_docker - Docker container management (15 actions) unraid_vm - Virtual machine management (9 actions) - unraid_notifications - Notification management (9 actions) + unraid_notifications - Notification management (14 actions) unraid_rclone - Cloud storage remotes (4 actions) - unraid_users - User management (8 actions) + unraid_users - User management (1 action) unraid_keys - API key management (5 actions) unraid_health - Health monitoring and diagnostics (3 actions) """ diff --git a/unraid_mcp/tools/array.py b/unraid_mcp/tools/array.py index b712849..12a863b 100644 --- a/unraid_mcp/tools/array.py +++ b/unraid_mcp/tools/array.py @@ -73,7 +73,7 @@ def register_array_tool(mcp: FastMCP) -> None: """Manage Unraid array parity checks.
Actions: - parity_start - Start parity check (optional correct=True to fix errors) + parity_start - Start parity check (correct=True to fix errors, correct=False for read-only; required) parity_pause - Pause running parity check parity_resume - Resume paused parity check parity_cancel - Cancel running parity check diff --git a/unraid_mcp/tools/docker.py b/unraid_mcp/tools/docker.py index b125551..a4f0862 100644 --- a/unraid_mcp/tools/docker.py +++ b/unraid_mcp/tools/docker.py @@ -233,8 +233,8 @@ async def _resolve_container_id(container_id: str, *, strict: bool = False) -> s data = await make_graphql_request(list_query) containers = safe_get(data, "docker", "containers", default=[]) - # Short hex prefix: match by ID prefix before trying name matching - if _DOCKER_SHORT_ID_PATTERN.match(container_id): + # Short hex prefix: match by ID prefix before trying name matching (strict bypasses this) + if not strict and _DOCKER_SHORT_ID_PATTERN.match(container_id): id_lower = container_id.lower() matches: list[dict[str, Any]] = [] for c in containers: diff --git a/unraid_mcp/tools/health.py b/unraid_mcp/tools/health.py index e025171..c87a803 100644 --- a/unraid_mcp/tools/health.py +++ b/unraid_mcp/tools/health.py @@ -21,6 +21,7 @@ from ..config.settings import ( from ..core.client import make_graphql_request from ..core.exceptions import ToolError, tool_error_handler from ..core.utils import safe_display_url +from ..subscriptions.utils import _analyze_subscription_status ALL_ACTIONS = {"check", "test_connection", "diagnose"} @@ -218,42 +219,6 @@ async def _comprehensive_check() -> dict[str, Any]: } -def _analyze_subscription_status( - status: dict[str, Any], -) -> tuple[int, list[dict[str, Any]]]: - """Analyze subscription status dict, returning error count and connection issues. - - This is the canonical implementation of subscription status analysis. - TODO: subscriptions/diagnostics.py has a similar status-analysis pattern - in diagnose_subscriptions(). 
That module could import and call this helper - directly to avoid divergence. See Code-H05. - - Args: - status: Dict of subscription name -> status info from get_subscription_status(). - - Returns: - Tuple of (error_count, connection_issues_list). - """ - error_count = 0 - connection_issues: list[dict[str, Any]] = [] - - for sub_name, sub_status in status.items(): - runtime = sub_status.get("runtime", {}) - conn_state = runtime.get("connection_state", "unknown") - if conn_state in ("error", "auth_failed", "timeout", "max_retries_exceeded"): - error_count += 1 - if runtime.get("last_error"): - connection_issues.append( - { - "subscription": sub_name, - "state": conn_state, - "error": runtime["last_error"], - } - ) - - return error_count, connection_issues - - async def _diagnose_subscriptions() -> dict[str, Any]: """Import and run subscription diagnostics.""" try: diff --git a/unraid_mcp/tools/keys.py b/unraid_mcp/tools/keys.py index 65dfeae..a761aa5 100644 --- a/unraid_mcp/tools/keys.py +++ b/unraid_mcp/tools/keys.py @@ -114,10 +114,14 @@ def register_keys_tool(mcp: FastMCP) -> None: if permissions is not None: input_data["permissions"] = permissions data = await make_graphql_request(MUTATIONS["create"], {"input": input_data}) - return { - "success": True, - "key": (data.get("apiKey") or {}).get("create", {}), - } + created_key = (data.get("apiKey") or {}).get("create") + if not created_key: + return { + "success": False, + "key": {}, + "message": "API key creation failed: no data returned from server", + } + return {"success": True, "key": created_key} if action == "update": if not key_id: @@ -128,10 +132,14 @@ def register_keys_tool(mcp: FastMCP) -> None: if roles is not None: input_data["roles"] = roles data = await make_graphql_request(MUTATIONS["update"], {"input": input_data}) - return { - "success": True, - "key": (data.get("apiKey") or {}).get("update", {}), - } + updated_key = (data.get("apiKey") or {}).get("update") + if not updated_key: + return { + 
"success": False, + "key": {}, + "message": "API key update failed: no data returned from server", + } + return {"success": True, "key": updated_key} if action == "delete": if not key_id: diff --git a/unraid_mcp/tools/notifications.py b/unraid_mcp/tools/notifications.py index a40eedb..e74b477 100644 --- a/unraid_mcp/tools/notifications.py +++ b/unraid_mcp/tools/notifications.py @@ -50,34 +50,80 @@ MUTATIONS: dict[str, str] = { """, "archive": """ mutation ArchiveNotification($id: PrefixedID!) { - archiveNotification(id: $id) + archiveNotification(id: $id) { id title importance } } """, "unread": """ mutation UnreadNotification($id: PrefixedID!) { - unreadNotification(id: $id) + unreadNotification(id: $id) { id title importance } } """, "delete": """ mutation DeleteNotification($id: PrefixedID!, $type: NotificationType!) { - deleteNotification(id: $id, type: $type) + deleteNotification(id: $id, type: $type) { + unread { info warning alert total } + archive { info warning alert total } + } } """, "delete_archived": """ mutation DeleteArchivedNotifications { - deleteArchivedNotifications + deleteArchivedNotifications { + unread { info warning alert total } + archive { info warning alert total } + } } """, "archive_all": """ mutation ArchiveAllNotifications($importance: NotificationImportance) { - archiveAll(importance: $importance) + archiveAll(importance: $importance) { + unread { info warning alert total } + archive { info warning alert total } + } + } + """, + "archive_many": """ + mutation ArchiveNotifications($ids: [PrefixedID!]!) { + archiveNotifications(ids: $ids) { + unread { info warning alert total } + archive { info warning alert total } + } + } + """, + "create_unique": """ + mutation NotifyIfUnique($input: NotificationData!) { + notifyIfUnique(input: $input) { id title importance } + } + """, + "unarchive_many": """ + mutation UnarchiveNotifications($ids: [PrefixedID!]!) 
{ + unarchiveNotifications(ids: $ids) { + unread { info warning alert total } + archive { info warning alert total } + } + } + """, + "unarchive_all": """ + mutation UnarchiveAll($importance: NotificationImportance) { + unarchiveAll(importance: $importance) { + unread { info warning alert total } + archive { info warning alert total } + } + } + """, + "recalculate": """ + mutation RecalculateOverview { + recalculateOverview { + unread { info warning alert total } + archive { info warning alert total } + } } """, } DESTRUCTIVE_ACTIONS = {"delete", "delete_archived"} ALL_ACTIONS = set(QUERIES) | set(MUTATIONS) -_VALID_IMPORTANCE = {"ALERT", "WARNING", "NORMAL"} +_VALID_IMPORTANCE = {"ALERT", "WARNING", "INFO"} NOTIFICATION_ACTIONS = Literal[ "overview", @@ -89,6 +135,11 @@ NOTIFICATION_ACTIONS = Literal[ "delete", "delete_archived", "archive_all", + "archive_many", + "create_unique", + "unarchive_many", + "unarchive_all", + "recalculate", ] if set(get_args(NOTIFICATION_ACTIONS)) != ALL_ACTIONS: @@ -108,6 +159,7 @@ def register_notifications_tool(mcp: FastMCP) -> None: action: NOTIFICATION_ACTIONS, confirm: bool = False, notification_id: str | None = None, + notification_ids: list[str] | None = None, notification_type: str | None = None, importance: str | None = None, offset: int = 0, @@ -129,6 +181,11 @@ def register_notifications_tool(mcp: FastMCP) -> None: delete - Delete a notification (requires notification_id, notification_type, confirm=True) delete_archived - Delete all archived notifications (requires confirm=True) archive_all - Archive all notifications (optional importance filter) + archive_many - Archive multiple notifications by ID (requires notification_ids) + create_unique - Create notification only if no equivalent unread exists (requires title, subject, description, importance) + unarchive_many - Move notifications back to unread (requires notification_ids) + unarchive_all - Move all archived notifications to unread (optional importance filter) + 
recalculate - Recompute overview counts from disk """ if action not in ALL_ACTIONS: raise ToolError(f"Invalid action '{action}'. Must be one of: {sorted(ALL_ACTIONS)}") @@ -212,6 +269,55 @@ def register_notifications_tool(mcp: FastMCP) -> None: data = await make_graphql_request(MUTATIONS["archive_all"], variables) return {"success": True, "action": "archive_all", "data": data} + if action == "archive_many": + if not notification_ids: + raise ToolError("notification_ids is required for 'archive_many' action") + data = await make_graphql_request( + MUTATIONS["archive_many"], {"ids": notification_ids} + ) + return {"success": True, "action": "archive_many", "data": data} + + if action == "create_unique": + if title is None or subject is None or description is None or importance is None: + raise ToolError( + "create_unique requires title, subject, description, and importance" + ) + if importance.upper() not in _VALID_IMPORTANCE: + raise ToolError( + f"importance must be one of: {', '.join(sorted(_VALID_IMPORTANCE))}. 
" + f"Got: '{importance}'" + ) + input_data = { + "title": title, + "subject": subject, + "description": description, + "importance": importance.upper(), + } + data = await make_graphql_request(MUTATIONS["create_unique"], {"input": input_data}) + notification = data.get("notifyIfUnique") + if notification is None: + return {"success": True, "duplicate": True, "data": None} + return {"success": True, "duplicate": False, "data": notification} + + if action == "unarchive_many": + if not notification_ids: + raise ToolError("notification_ids is required for 'unarchive_many' action") + data = await make_graphql_request( + MUTATIONS["unarchive_many"], {"ids": notification_ids} + ) + return {"success": True, "action": "unarchive_many", "data": data} + + if action == "unarchive_all": + vars_: dict[str, Any] | None = None + if importance: + vars_ = {"importance": importance.upper()} + data = await make_graphql_request(MUTATIONS["unarchive_all"], vars_) + return {"success": True, "action": "unarchive_all", "data": data} + + if action == "recalculate": + data = await make_graphql_request(MUTATIONS["recalculate"]) + return {"success": True, "action": "recalculate", "data": data} + raise ToolError(f"Unhandled action '{action}' — this is a bug") logger.info("Notifications tool registered successfully") diff --git a/unraid_mcp/tools/virtualization.py b/unraid_mcp/tools/virtualization.py index 89166b5..f54baf7 100644 --- a/unraid_mcp/tools/virtualization.py +++ b/unraid_mcp/tools/virtualization.py @@ -114,56 +114,42 @@ def register_vm_tool(mcp: FastMCP) -> None: raise ToolError(f"Action '{action}' is destructive. 
Set confirm=True to proceed.") with tool_error_handler("vm", action, logger): - try: - logger.info(f"Executing unraid_vm action={action}") + logger.info(f"Executing unraid_vm action={action}") - if action == "list": - data = await make_graphql_request(QUERIES["list"]) - if data.get("vms"): - vms = data["vms"].get("domains") or data["vms"].get("domain") or [] - if isinstance(vms, dict): - vms = [vms] - return {"vms": vms} - return {"vms": []} - - if action == "details": - data = await make_graphql_request(QUERIES["details"]) - if not data.get("vms"): - raise ToolError("No VM data returned from server") + if action == "list": + data = await make_graphql_request(QUERIES["list"]) + if data.get("vms"): vms = data["vms"].get("domains") or data["vms"].get("domain") or [] if isinstance(vms, dict): vms = [vms] - for vm in vms: - if ( - vm.get("uuid") == vm_id - or vm.get("id") == vm_id - or vm.get("name") == vm_id - ): - return dict(vm) - available = [f"{v.get('name')} (UUID: {v.get('uuid')})" for v in vms] - raise ToolError(f"VM '{vm_id}' not found. Available: {', '.join(available)}") + return {"vms": vms} + return {"vms": []} - # Mutations - if action in MUTATIONS: - data = await make_graphql_request(MUTATIONS[action], {"id": vm_id}) - field = _MUTATION_FIELDS.get(action, action) - if data.get("vm") and field in data["vm"]: - return { - "success": data["vm"][field], - "action": action, - "vm_id": vm_id, - } - raise ToolError(f"Failed to {action} VM or unexpected response") + if action == "details": + data = await make_graphql_request(QUERIES["details"]) + if not data.get("vms"): + raise ToolError("No VM data returned from server") + vms = data["vms"].get("domains") or data["vms"].get("domain") or [] + if isinstance(vms, dict): + vms = [vms] + for vm in vms: + if vm.get("uuid") == vm_id or vm.get("id") == vm_id or vm.get("name") == vm_id: + return dict(vm) + available = [f"{v.get('name')} (UUID: {v.get('uuid')})" for v in vms] + raise ToolError(f"VM '{vm_id}' not found. 
Available: {', '.join(available)}") - raise ToolError(f"Unhandled action '{action}' — this is a bug") + # Mutations + if action in MUTATIONS: + data = await make_graphql_request(MUTATIONS[action], {"id": vm_id}) + field = _MUTATION_FIELDS.get(action, action) + if data.get("vm") and field in data["vm"]: + return { + "success": data["vm"][field], + "action": action, + "vm_id": vm_id, + } + raise ToolError(f"Failed to {action} VM or unexpected response") - except ToolError: - raise - except Exception as e: - if "VMs are not available" in str(e): - raise ToolError( - "VMs not available on this server. Check VM support is enabled." - ) from e - raise + raise ToolError(f"Unhandled action '{action}' — this is a bug") logger.info("VM tool registered successfully") diff --git a/uv.lock b/uv.lock index 313cdc1..b141fa9 100644 --- a/uv.lock +++ b/uv.lock @@ -1729,7 +1729,7 @@ wheels = [ [[package]] name = "unraid-mcp" -version = "0.2.0" +version = "0.2.1" source = { editable = "." } dependencies = [ { name = "fastapi" },