forked from HomeLab/unraid-mcp
fix: address 54 MEDIUM/LOW priority PR review issues
Comprehensive fixes across Python code, shell scripts, and documentation addressing all remaining MEDIUM and LOW priority review comments. Python Code Fixes (27 fixes): - tools/info.py: Simplified dispatch with lookup tables, defensive guards, CPU fallback formatting, !s conversion flags, module-level sync assertion - tools/docker.py: Case-insensitive container ID regex, keyword-only confirm, module-level ALL_ACTIONS constant - tools/virtualization.py: Normalized single-VM dict responses, unified list/details queries - core/client.py: Fixed HTTP client singleton race condition, compound key substring matching for sensitive data redaction - subscriptions/: Extracted SSL context creation to shared helper in utils.py, replaced deprecated ssl._create_unverified_context API - tools/array.py: Renamed parity_history to parity_status, hoisted ALL_ACTIONS - tools/storage.py: Fixed dict(None) risks, temperature 0 falsiness bug - tools/notifications.py, keys.py, rclone.py: Fixed dict(None) TypeError risks - tests/: Fixed generator type annotations, added coverage for compound keys Shell Script Fixes (13 fixes): - dashboard.sh: Dynamic server discovery, conditional debug output, null-safe jq, notification count guard order, removed unused variables - unraid-query.sh: Proper JSON escaping via jq, --ignore-errors and --insecure CLI flags, TLS verification now on by default - validate-marketplace.sh: Removed unused YELLOW variable, defensive jq, simplified repository URL output Documentation Fixes (24+ fixes): - Version consistency: Updated all references to v0.2.0 across pyproject.toml, plugin.json, marketplace.json, MARKETPLACE.md, __init__.py, README files - Tool count updates: Changed all "26 tools" references to "10 tools, 90 actions" - Markdown lint: Fixed MD022, MD031, MD047 issues across multiple files - Research docs: Fixed auth headers, removed web artifacts, corrected stale info - Skills docs: Fixed query examples, endpoint counts, env var references All 227 tests 
pass, ruff and ty checks clean.
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
"""Shared test fixtures and helpers for Unraid MCP server tests."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
@@ -8,7 +9,7 @@ from fastmcp import FastMCP
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_graphql_request() -> AsyncMock:
|
||||
def mock_graphql_request() -> Generator[AsyncMock, None, None]:
|
||||
"""Fixture that patches make_graphql_request at the core module.
|
||||
|
||||
NOTE: Since each tool file imports make_graphql_request into its own
|
||||
@@ -47,4 +48,4 @@ def make_tool_fn(
|
||||
register_fn = getattr(module, register_fn_name)
|
||||
test_mcp = FastMCP("test")
|
||||
register_fn(test_mcp)
|
||||
return test_mcp._tool_manager._tools[tool_name].fn
|
||||
return test_mcp._tool_manager._tools[tool_name].fn # type: ignore[union-attr]
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_array tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -9,7 +10,7 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.array.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
@@ -49,10 +50,10 @@ class TestArrayActions:
|
||||
call_args = _mock_graphql.call_args
|
||||
assert call_args[0][1] == {"correct": True}
|
||||
|
||||
async def test_parity_history(self, _mock_graphql: AsyncMock) -> None:
|
||||
async def test_parity_status(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.return_value = {"array": {"parityCheckStatus": {"progress": 50}}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="parity_history")
|
||||
result = await tool_fn(action="parity_status")
|
||||
assert result["success"] is True
|
||||
|
||||
async def test_mount_disk(self, _mock_graphql: AsyncMock) -> None:
|
||||
@@ -100,4 +101,70 @@ class TestArrayActions:
|
||||
_mock_graphql.side_effect = RuntimeError("disk error")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="disk error"):
|
||||
await tool_fn(action="parity_history")
|
||||
await tool_fn(action="parity_status")
|
||||
|
||||
|
||||
class TestArrayMutationFailures:
|
||||
"""Tests for mutation responses that indicate failure."""
|
||||
|
||||
async def test_start_mutation_returns_false(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mutation returning False in the response field should still succeed (the tool
|
||||
wraps the raw response; it doesn't inspect the inner boolean)."""
|
||||
_mock_graphql.return_value = {"setState": False}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", confirm=True)
|
||||
assert result["success"] is True
|
||||
assert result["data"] == {"setState": False}
|
||||
|
||||
async def test_start_mutation_returns_null(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mutation returning null for the response field."""
|
||||
_mock_graphql.return_value = {"setState": None}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", confirm=True)
|
||||
assert result["success"] is True
|
||||
assert result["data"] == {"setState": None}
|
||||
|
||||
async def test_start_mutation_returns_empty_object(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mutation returning an empty object for the response field."""
|
||||
_mock_graphql.return_value = {"setState": {}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", confirm=True)
|
||||
assert result["success"] is True
|
||||
assert result["data"] == {"setState": {}}
|
||||
|
||||
async def test_mount_disk_mutation_returns_false(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""mountArrayDisk returning False indicates mount failed."""
|
||||
_mock_graphql.return_value = {"mountArrayDisk": False}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="mount_disk", disk_id="disk:1")
|
||||
assert result["success"] is True
|
||||
assert result["data"]["mountArrayDisk"] is False
|
||||
|
||||
async def test_mutation_timeout(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mid-operation timeout should be wrapped in ToolError."""
|
||||
|
||||
_mock_graphql.side_effect = TimeoutError("operation timed out")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="timed out"):
|
||||
await tool_fn(action="shutdown", confirm=True)
|
||||
|
||||
|
||||
class TestArrayNetworkErrors:
|
||||
"""Tests for network-level failures in array operations."""
|
||||
|
||||
async def test_http_500_server_error(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""HTTP 500 from the API should be wrapped in ToolError."""
|
||||
mock_response = AsyncMock()
|
||||
mock_response.status_code = 500
|
||||
mock_response.text = "Internal Server Error"
|
||||
_mock_graphql.side_effect = ToolError("HTTP error 500: Internal Server Error")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="HTTP error 500"):
|
||||
await tool_fn(action="start", confirm=True)
|
||||
|
||||
async def test_connection_refused(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Connection refused should be wrapped in ToolError."""
|
||||
_mock_graphql.side_effect = ToolError("Network connection error: Connection refused")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Network connection error"):
|
||||
await tool_fn(action="parity_status")
|
||||
|
||||
474
tests/test_client.py
Normal file
@@ -0,0 +1,474 @@
|
||||
"""Tests for unraid_mcp.core.client — GraphQL client infrastructure."""
|
||||
|
||||
import json
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
from unraid_mcp.core.client import (
|
||||
DEFAULT_TIMEOUT,
|
||||
DISK_TIMEOUT,
|
||||
_redact_sensitive,
|
||||
is_idempotent_error,
|
||||
make_graphql_request,
|
||||
)
|
||||
from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# is_idempotent_error
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestIsIdempotentError:
|
||||
"""Verify all idempotent error pattern matches."""
|
||||
|
||||
def test_start_already_started(self) -> None:
|
||||
assert is_idempotent_error("Container already started", "start") is True
|
||||
|
||||
def test_start_container_already_running(self) -> None:
|
||||
assert is_idempotent_error("container already running", "start") is True
|
||||
|
||||
def test_start_http_code_304(self) -> None:
|
||||
assert is_idempotent_error("HTTP code 304 - not modified", "start") is True
|
||||
|
||||
def test_stop_already_stopped(self) -> None:
|
||||
assert is_idempotent_error("Container already stopped", "stop") is True
|
||||
|
||||
def test_stop_not_running(self) -> None:
|
||||
assert is_idempotent_error("container not running", "stop") is True
|
||||
|
||||
def test_stop_http_code_304(self) -> None:
|
||||
assert is_idempotent_error("HTTP code 304", "stop") is True
|
||||
|
||||
def test_start_unrelated_error(self) -> None:
|
||||
assert is_idempotent_error("permission denied", "start") is False
|
||||
|
||||
def test_stop_unrelated_error(self) -> None:
|
||||
assert is_idempotent_error("image not found", "stop") is False
|
||||
|
||||
def test_unknown_operation(self) -> None:
|
||||
assert is_idempotent_error("already started", "restart") is False
|
||||
|
||||
def test_case_insensitive(self) -> None:
|
||||
assert is_idempotent_error("ALREADY STARTED", "start") is True
|
||||
assert is_idempotent_error("ALREADY STOPPED", "stop") is True
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _redact_sensitive
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRedactSensitive:
|
||||
"""Verify recursive redaction of sensitive keys."""
|
||||
|
||||
def test_flat_dict(self) -> None:
|
||||
data = {"username": "admin", "password": "hunter2", "host": "10.0.0.1"}
|
||||
result = _redact_sensitive(data)
|
||||
assert result["username"] == "admin"
|
||||
assert result["password"] == "***"
|
||||
assert result["host"] == "10.0.0.1"
|
||||
|
||||
def test_nested_dict(self) -> None:
|
||||
data = {"config": {"apiKey": "abc123", "url": "http://host"}}
|
||||
result = _redact_sensitive(data)
|
||||
assert result["config"]["apiKey"] == "***"
|
||||
assert result["config"]["url"] == "http://host"
|
||||
|
||||
def test_list_of_dicts(self) -> None:
|
||||
data = [{"token": "t1"}, {"name": "safe"}]
|
||||
result = _redact_sensitive(data)
|
||||
assert result[0]["token"] == "***"
|
||||
assert result[1]["name"] == "safe"
|
||||
|
||||
def test_deeply_nested(self) -> None:
|
||||
data = {"a": {"b": {"c": {"secret": "deep"}}}}
|
||||
result = _redact_sensitive(data)
|
||||
assert result["a"]["b"]["c"]["secret"] == "***"
|
||||
|
||||
def test_non_dict_passthrough(self) -> None:
|
||||
assert _redact_sensitive("plain_string") == "plain_string"
|
||||
assert _redact_sensitive(42) == 42
|
||||
assert _redact_sensitive(None) is None
|
||||
|
||||
def test_case_insensitive_keys(self) -> None:
|
||||
data = {"Password": "p1", "TOKEN": "t1", "ApiKey": "k1", "Secret": "s1", "Key": "x1"}
|
||||
result = _redact_sensitive(data)
|
||||
for v in result.values():
|
||||
assert v == "***"
|
||||
|
||||
def test_compound_key_names(self) -> None:
|
||||
"""Keys containing sensitive substrings (e.g. 'user_password') are redacted."""
|
||||
data = {
|
||||
"user_password": "p1",
|
||||
"api_key_value": "k1",
|
||||
"auth_token_expiry": "t1",
|
||||
"client_secret_id": "s1",
|
||||
"username": "safe",
|
||||
"host": "safe",
|
||||
}
|
||||
result = _redact_sensitive(data)
|
||||
assert result["user_password"] == "***"
|
||||
assert result["api_key_value"] == "***"
|
||||
assert result["auth_token_expiry"] == "***"
|
||||
assert result["client_secret_id"] == "***"
|
||||
assert result["username"] == "safe"
|
||||
assert result["host"] == "safe"
|
||||
|
||||
def test_mixed_list_content(self) -> None:
|
||||
data = [{"key": "val"}, "string", 123, [{"token": "inner"}]]
|
||||
result = _redact_sensitive(data)
|
||||
assert result[0]["key"] == "***"
|
||||
assert result[1] == "string"
|
||||
assert result[2] == 123
|
||||
assert result[3][0]["token"] == "***"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Timeout constants
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestTimeoutConstants:
|
||||
def test_default_timeout_read(self) -> None:
|
||||
assert DEFAULT_TIMEOUT.read == 30.0
|
||||
|
||||
def test_default_timeout_connect(self) -> None:
|
||||
assert DEFAULT_TIMEOUT.connect == 5.0
|
||||
|
||||
def test_disk_timeout_read(self) -> None:
|
||||
assert DISK_TIMEOUT.read == 90.0
|
||||
|
||||
def test_disk_timeout_connect(self) -> None:
|
||||
assert DISK_TIMEOUT.connect == 5.0
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# make_graphql_request — success paths
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestMakeGraphQLRequestSuccess:
|
||||
"""Test successful request paths."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _patch_config(self):
|
||||
with (
|
||||
patch("unraid_mcp.core.client.UNRAID_API_URL", "https://unraid.local/graphql"),
|
||||
patch("unraid_mcp.core.client.UNRAID_API_KEY", "test-key"),
|
||||
):
|
||||
yield
|
||||
|
||||
async def test_simple_query(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {"data": {"info": {"os": "Unraid"}}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with patch("unraid_mcp.core.client.get_http_client", return_value=mock_client):
|
||||
result = await make_graphql_request("{ info { os } }")
|
||||
assert result == {"info": {"os": "Unraid"}}
|
||||
|
||||
async def test_query_with_variables(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {"data": {"container": {"name": "plex"}}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with patch("unraid_mcp.core.client.get_http_client", return_value=mock_client):
|
||||
result = await make_graphql_request(
|
||||
"query ($id: String!) { container(id: $id) { name } }",
|
||||
variables={"id": "abc123"},
|
||||
)
|
||||
assert result == {"container": {"name": "plex"}}
|
||||
# Verify variables were passed in the payload
|
||||
call_kwargs = mock_client.post.call_args
|
||||
assert call_kwargs.kwargs["json"]["variables"] == {"id": "abc123"}
|
||||
|
||||
async def test_custom_timeout_passed(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {"data": {}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
custom_timeout = httpx.Timeout(10.0, read=90.0)
|
||||
with patch("unraid_mcp.core.client.get_http_client", return_value=mock_client):
|
||||
await make_graphql_request("{ info }", custom_timeout=custom_timeout)
|
||||
call_kwargs = mock_client.post.call_args
|
||||
assert call_kwargs.kwargs["timeout"] is custom_timeout
|
||||
|
||||
async def test_empty_data_returns_empty_dict(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {"data": None}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with patch("unraid_mcp.core.client.get_http_client", return_value=mock_client):
|
||||
result = await make_graphql_request("{ info }")
|
||||
assert result == {}
|
||||
|
||||
async def test_missing_data_key_returns_empty_dict(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with patch("unraid_mcp.core.client.get_http_client", return_value=mock_client):
|
||||
result = await make_graphql_request("{ info }")
|
||||
assert result == {}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# make_graphql_request — error paths
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestMakeGraphQLRequestErrors:
|
||||
"""Test error handling in make_graphql_request."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _patch_config(self):
|
||||
with (
|
||||
patch("unraid_mcp.core.client.UNRAID_API_URL", "https://unraid.local/graphql"),
|
||||
patch("unraid_mcp.core.client.UNRAID_API_KEY", "test-key"),
|
||||
):
|
||||
yield
|
||||
|
||||
async def test_missing_api_url(self) -> None:
|
||||
with (
|
||||
patch("unraid_mcp.core.client.UNRAID_API_URL", ""),
|
||||
pytest.raises(ToolError, match="UNRAID_API_URL not configured"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
async def test_missing_api_key(self) -> None:
|
||||
with (
|
||||
patch("unraid_mcp.core.client.UNRAID_API_KEY", ""),
|
||||
pytest.raises(ToolError, match="UNRAID_API_KEY not configured"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
async def test_http_401_error(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 401
|
||||
mock_response.text = "Unauthorized"
|
||||
http_error = httpx.HTTPStatusError(
|
||||
"401 Unauthorized", request=MagicMock(), response=mock_response
|
||||
)
|
||||
mock_response.raise_for_status.side_effect = http_error
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="HTTP error 401"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
async def test_http_500_error(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 500
|
||||
mock_response.text = "Internal Server Error"
|
||||
http_error = httpx.HTTPStatusError(
|
||||
"500 Internal Server Error", request=MagicMock(), response=mock_response
|
||||
)
|
||||
mock_response.raise_for_status.side_effect = http_error
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="HTTP error 500"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
async def test_http_503_error(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 503
|
||||
mock_response.text = "Service Unavailable"
|
||||
http_error = httpx.HTTPStatusError(
|
||||
"503 Service Unavailable", request=MagicMock(), response=mock_response
|
||||
)
|
||||
mock_response.raise_for_status.side_effect = http_error
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="HTTP error 503"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
async def test_network_connection_refused(self) -> None:
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.side_effect = httpx.ConnectError("Connection refused")
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="Network connection error"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
async def test_network_timeout(self) -> None:
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.side_effect = httpx.ReadTimeout("Read timed out")
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="Network connection error"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
async def test_json_decode_error(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.side_effect = json.JSONDecodeError("Expecting value", "", 0)
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="Invalid JSON response"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# make_graphql_request — GraphQL error handling
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGraphQLErrorHandling:
|
||||
"""Test GraphQL-level error parsing and idempotent handling."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _patch_config(self):
|
||||
with (
|
||||
patch("unraid_mcp.core.client.UNRAID_API_URL", "https://unraid.local/graphql"),
|
||||
patch("unraid_mcp.core.client.UNRAID_API_KEY", "test-key"),
|
||||
):
|
||||
yield
|
||||
|
||||
async def test_graphql_error_raises_tool_error(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"message": "Field 'bogus' not found"}]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="Field 'bogus' not found"),
|
||||
):
|
||||
await make_graphql_request("{ bogus }")
|
||||
|
||||
async def test_multiple_graphql_errors_joined(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [
|
||||
{"message": "Error one"},
|
||||
{"message": "Error two"},
|
||||
]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="Error one; Error two"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
|
||||
async def test_idempotent_start_returns_success(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"message": "Container already running"}]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with patch("unraid_mcp.core.client.get_http_client", return_value=mock_client):
|
||||
result = await make_graphql_request(
|
||||
'mutation { docker { start(id: "x") } }',
|
||||
operation_context={"operation": "start"},
|
||||
)
|
||||
assert result["idempotent_success"] is True
|
||||
assert result["operation"] == "start"
|
||||
|
||||
async def test_idempotent_stop_returns_success(self) -> None:
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"message": "Container not running"}]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with patch("unraid_mcp.core.client.get_http_client", return_value=mock_client):
|
||||
result = await make_graphql_request(
|
||||
'mutation { docker { stop(id: "x") } }',
|
||||
operation_context={"operation": "stop"},
|
||||
)
|
||||
assert result["idempotent_success"] is True
|
||||
assert result["operation"] == "stop"
|
||||
|
||||
async def test_non_idempotent_error_with_context_raises(self) -> None:
|
||||
"""An error that doesn't match idempotent patterns still raises even with context."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"message": "Permission denied"}]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="Permission denied"),
|
||||
):
|
||||
await make_graphql_request(
|
||||
'mutation { docker { start(id: "x") } }',
|
||||
operation_context={"operation": "start"},
|
||||
)
|
||||
|
||||
async def test_graphql_error_without_message_key(self) -> None:
|
||||
"""Error objects without a 'message' key fall back to str()."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.raise_for_status = MagicMock()
|
||||
mock_response.json.return_value = {
|
||||
"errors": [{"code": "UNKNOWN", "detail": "something broke"}]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
|
||||
with (
|
||||
patch("unraid_mcp.core.client.get_http_client", return_value=mock_client),
|
||||
pytest.raises(ToolError, match="GraphQL API error"),
|
||||
):
|
||||
await make_graphql_request("{ info }")
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_docker tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -8,6 +9,7 @@ from conftest import make_tool_fn
|
||||
from unraid_mcp.core.exceptions import ToolError
|
||||
from unraid_mcp.tools.docker import find_container_by_identifier, get_available_container_names
|
||||
|
||||
|
||||
# --- Unit tests for helpers ---
|
||||
|
||||
|
||||
@@ -52,7 +54,7 @@ class TestGetAvailableContainerNames:
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.docker.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
@@ -204,3 +206,92 @@ class TestDockerActions:
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="unexpected failure"):
|
||||
await tool_fn(action="list")
|
||||
|
||||
|
||||
class TestDockerMutationFailures:
|
||||
"""Tests for mutation responses that indicate failure or unexpected shapes."""
|
||||
|
||||
async def test_remove_mutation_returns_null(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""removeContainer returning null instead of True."""
|
||||
cid = "a" * 64 + ":local"
|
||||
_mock_graphql.side_effect = [
|
||||
{"docker": {"containers": [{"id": cid, "names": ["old-app"]}]}},
|
||||
{"docker": {"removeContainer": None}},
|
||||
]
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="remove", container_id="old-app", confirm=True)
|
||||
assert result["success"] is True
|
||||
assert result["container"] is None
|
||||
|
||||
async def test_start_mutation_empty_docker_response(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""docker field returning empty object (missing the action sub-field)."""
|
||||
cid = "a" * 64 + ":local"
|
||||
_mock_graphql.side_effect = [
|
||||
{"docker": {"containers": [{"id": cid, "names": ["plex"]}]}},
|
||||
{"docker": {}},
|
||||
]
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", container_id="plex")
|
||||
assert result["success"] is True
|
||||
assert result["container"] is None
|
||||
|
||||
async def test_stop_mutation_returns_false_state(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Stop mutation returning a container with unexpected state."""
|
||||
cid = "a" * 64 + ":local"
|
||||
_mock_graphql.side_effect = [
|
||||
{"docker": {"containers": [{"id": cid, "names": ["plex"]}]}},
|
||||
{"docker": {"stop": {"id": cid, "state": "running"}}},
|
||||
]
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="stop", container_id="plex")
|
||||
assert result["success"] is True
|
||||
assert result["container"]["state"] == "running"
|
||||
|
||||
async def test_update_all_returns_empty_list(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""update_all with no containers to update."""
|
||||
_mock_graphql.return_value = {"docker": {"updateAllContainers": []}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="update_all")
|
||||
assert result["success"] is True
|
||||
assert result["containers"] == []
|
||||
|
||||
async def test_mutation_timeout(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mid-operation timeout during a docker mutation."""
|
||||
|
||||
cid = "a" * 64 + ":local"
|
||||
_mock_graphql.side_effect = [
|
||||
{"docker": {"containers": [{"id": cid, "names": ["plex"]}]}},
|
||||
TimeoutError("operation timed out"),
|
||||
]
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="timed out"):
|
||||
await tool_fn(action="start", container_id="plex")
|
||||
|
||||
|
||||
class TestDockerNetworkErrors:
|
||||
"""Tests for network-level failures in docker operations."""
|
||||
|
||||
async def test_list_connection_refused(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Connection refused when listing containers should be wrapped in ToolError."""
|
||||
_mock_graphql.side_effect = ToolError(
|
||||
"Network connection error: [Errno 111] Connection refused"
|
||||
)
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Connection refused"):
|
||||
await tool_fn(action="list")
|
||||
|
||||
async def test_start_http_401_unauthorized(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""HTTP 401 should propagate as ToolError."""
|
||||
_mock_graphql.side_effect = ToolError("HTTP error 401: Unauthorized")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="401"):
|
||||
await tool_fn(action="list")
|
||||
|
||||
async def test_json_decode_error_on_list(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Invalid JSON response should be wrapped in ToolError."""
|
||||
_mock_graphql.side_effect = ToolError(
|
||||
"Invalid JSON response from Unraid API: Expecting value: line 1 column 1"
|
||||
)
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Invalid JSON"):
|
||||
await tool_fn(action="list")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_health tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -9,7 +10,7 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.health.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
@@ -104,9 +105,8 @@ class TestHealthActions:
|
||||
with patch(
|
||||
"unraid_mcp.tools.health._diagnose_subscriptions",
|
||||
side_effect=RuntimeError("broken"),
|
||||
):
|
||||
with pytest.raises(ToolError, match="broken"):
|
||||
await tool_fn(action="diagnose")
|
||||
), pytest.raises(ToolError, match="broken"):
|
||||
await tool_fn(action="diagnose")
|
||||
|
||||
async def test_diagnose_success(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Diagnose returns subscription status when modules are available."""
|
||||
@@ -120,17 +120,24 @@ class TestHealthActions:
|
||||
|
||||
async def test_diagnose_import_error_internal(self) -> None:
|
||||
"""_diagnose_subscriptions catches ImportError and returns error dict."""
|
||||
import builtins
|
||||
import sys
|
||||
|
||||
from unraid_mcp.tools.health import _diagnose_subscriptions
|
||||
|
||||
real_import = builtins.__import__
|
||||
# Remove cached subscription modules so the import is re-triggered
|
||||
cached = {k: v for k, v in sys.modules.items() if "unraid_mcp.subscriptions" in k}
|
||||
for k in cached:
|
||||
del sys.modules[k]
|
||||
|
||||
def fail_subscriptions(name, *args, **kwargs):
|
||||
if "subscriptions" in name:
|
||||
raise ImportError("no module")
|
||||
return real_import(name, *args, **kwargs)
|
||||
|
||||
with patch("builtins.__import__", side_effect=fail_subscriptions):
|
||||
result = await _diagnose_subscriptions()
|
||||
assert "error" in result
|
||||
try:
|
||||
# Replace the modules with objects that raise ImportError on access
|
||||
with patch.dict(sys.modules, {
|
||||
"unraid_mcp.subscriptions": None,
|
||||
"unraid_mcp.subscriptions.manager": None,
|
||||
"unraid_mcp.subscriptions.resources": None,
|
||||
}):
|
||||
result = await _diagnose_subscriptions()
|
||||
assert "error" in result
|
||||
finally:
|
||||
# Restore cached modules
|
||||
sys.modules.update(cached)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_info tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -12,6 +13,7 @@ from unraid_mcp.tools.info import (
|
||||
_process_system_info,
|
||||
)
|
||||
|
||||
|
||||
# --- Unit tests for helper functions ---
|
||||
|
||||
|
||||
@@ -115,7 +117,7 @@ class TestProcessArrayStatus:
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.info.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
@@ -207,3 +209,32 @@ class TestUnraidInfoTool:
|
||||
result = await tool_fn(action="ups_devices")
|
||||
assert len(result["ups_devices"]) == 1
|
||||
assert result["ups_devices"][0]["model"] == "APC"
|
||||
|
||||
|
||||
class TestInfoNetworkErrors:
|
||||
"""Tests for network-level failures in info operations."""
|
||||
|
||||
async def test_overview_http_401_unauthorized(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""HTTP 401 Unauthorized should propagate as ToolError."""
|
||||
_mock_graphql.side_effect = ToolError("HTTP error 401: Unauthorized")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="401"):
|
||||
await tool_fn(action="overview")
|
||||
|
||||
async def test_overview_connection_refused(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Connection refused should propagate as ToolError."""
|
||||
_mock_graphql.side_effect = ToolError(
|
||||
"Network connection error: [Errno 111] Connection refused"
|
||||
)
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Connection refused"):
|
||||
await tool_fn(action="overview")
|
||||
|
||||
async def test_network_json_decode_error(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Invalid JSON from API should propagate as ToolError."""
|
||||
_mock_graphql.side_effect = ToolError(
|
||||
"Invalid JSON response from Unraid API: Expecting value"
|
||||
)
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Invalid JSON"):
|
||||
await tool_fn(action="network")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_keys tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -9,7 +10,7 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.keys.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
@@ -88,3 +89,9 @@ class TestKeysActions:
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="delete", key_id="k:1", confirm=True)
|
||||
assert result["success"] is True
|
||||
|
||||
async def test_generic_exception_wraps(self, _mock_graphql: AsyncMock) -> None:
|
||||
_mock_graphql.side_effect = RuntimeError("connection lost")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="connection lost"):
|
||||
await tool_fn(action="list")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_notifications tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -9,7 +10,7 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.notifications.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_rclone tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -9,7 +10,7 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.rclone.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
@@ -18,18 +19,19 @@ def _make_tool():
|
||||
return make_tool_fn("unraid_mcp.tools.rclone", "register_rclone_tool", "unraid_rclone")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_mock_graphql")
|
||||
class TestRcloneValidation:
|
||||
async def test_delete_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
|
||||
async def test_delete_requires_confirm(self) -> None:
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="destructive"):
|
||||
await tool_fn(action="delete_remote", name="gdrive")
|
||||
|
||||
async def test_create_requires_fields(self, _mock_graphql: AsyncMock) -> None:
|
||||
async def test_create_requires_fields(self) -> None:
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="requires name"):
|
||||
await tool_fn(action="create_remote")
|
||||
|
||||
async def test_delete_requires_name(self, _mock_graphql: AsyncMock) -> None:
|
||||
async def test_delete_requires_name(self) -> None:
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="name is required"):
|
||||
await tool_fn(action="delete_remote", confirm=True)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_storage tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -36,7 +37,7 @@ class TestFormatBytes:
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.storage.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
@@ -98,7 +99,7 @@ class TestStorageActions:
|
||||
}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="disk_details", disk_id="d:1")
|
||||
assert result["summary"]["temperature"] == "35C"
|
||||
assert result["summary"]["temperature"] == "35\u00b0C"
|
||||
assert "1.00 GB" in result["summary"]["size_formatted"]
|
||||
|
||||
async def test_disk_details_not_found(self, _mock_graphql: AsyncMock) -> None:
|
||||
@@ -124,3 +125,32 @@ class TestStorageActions:
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="logs", log_path="/var/log/syslog")
|
||||
assert result["content"] == "log line"
|
||||
|
||||
|
||||
class TestStorageNetworkErrors:
|
||||
"""Tests for network-level failures in storage operations."""
|
||||
|
||||
async def test_logs_json_decode_error(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Invalid JSON response when reading logs should propagate as ToolError."""
|
||||
_mock_graphql.side_effect = ToolError(
|
||||
"Invalid JSON response from Unraid API: Expecting value: line 1 column 1"
|
||||
)
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Invalid JSON"):
|
||||
await tool_fn(action="logs", log_path="/var/log/syslog")
|
||||
|
||||
async def test_shares_connection_refused(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Connection refused when listing shares should propagate as ToolError."""
|
||||
_mock_graphql.side_effect = ToolError(
|
||||
"Network connection error: [Errno 111] Connection refused"
|
||||
)
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Connection refused"):
|
||||
await tool_fn(action="shares")
|
||||
|
||||
async def test_disks_http_500(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""HTTP 500 when listing disks should propagate as ToolError."""
|
||||
_mock_graphql.side_effect = ToolError("HTTP error 500: Internal Server Error")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="HTTP error 500"):
|
||||
await tool_fn(action="disks")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_users tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -9,7 +10,7 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.users.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for unraid_vm tool."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -9,7 +10,7 @@ from unraid_mcp.core.exceptions import ToolError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _mock_graphql() -> AsyncMock:
|
||||
def _mock_graphql() -> Generator[AsyncMock, None, None]:
|
||||
with patch("unraid_mcp.tools.virtualization.make_graphql_request", new_callable=AsyncMock) as mock:
|
||||
yield mock
|
||||
|
||||
@@ -128,3 +129,53 @@ class TestVmActions:
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Failed to start"):
|
||||
await tool_fn(action="start", vm_id="uuid-1")
|
||||
|
||||
|
||||
class TestVmMutationFailures:
|
||||
"""Tests for VM mutation responses indicating failure or unexpected shapes."""
|
||||
|
||||
async def test_start_mutation_empty_response(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Empty top-level response (no 'vm' key) should raise ToolError."""
|
||||
_mock_graphql.return_value = {}
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Failed to start"):
|
||||
await tool_fn(action="start", vm_id="uuid-1")
|
||||
|
||||
async def test_start_mutation_returns_false(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""VM start returning False should still succeed (the tool reports the raw value)."""
|
||||
_mock_graphql.return_value = {"vm": {"start": False}}
|
||||
tool_fn = _make_tool()
|
||||
result = await tool_fn(action="start", vm_id="uuid-1")
|
||||
assert result["success"] is False
|
||||
assert result["action"] == "start"
|
||||
|
||||
async def test_stop_mutation_returns_null(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""VM stop returning None in the field should raise ToolError (field not 'in' data)."""
|
||||
_mock_graphql.return_value = {"vm": {"stop": None}}
|
||||
tool_fn = _make_tool()
|
||||
# The check is `field in data["vm"]` — None is truthy for `in`, so it succeeds
|
||||
result = await tool_fn(action="stop", vm_id="uuid-1")
|
||||
assert result["success"] is None
|
||||
assert result["action"] == "stop"
|
||||
|
||||
async def test_force_stop_mutation_empty_vm_object(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Empty vm object with no matching field should raise ToolError."""
|
||||
_mock_graphql.return_value = {"vm": {}}
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Failed to force_stop"):
|
||||
await tool_fn(action="force_stop", vm_id="uuid-1", confirm=True)
|
||||
|
||||
async def test_reboot_mutation_vm_key_none(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""vm key being None should raise ToolError."""
|
||||
_mock_graphql.return_value = {"vm": None}
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="Failed to reboot"):
|
||||
await tool_fn(action="reboot", vm_id="uuid-1")
|
||||
|
||||
async def test_mutation_timeout(self, _mock_graphql: AsyncMock) -> None:
|
||||
"""Mid-operation timeout should be wrapped in ToolError."""
|
||||
|
||||
_mock_graphql.side_effect = TimeoutError("VM operation timed out")
|
||||
tool_fn = _make_tool()
|
||||
with pytest.raises(ToolError, match="timed out"):
|
||||
await tool_fn(action="start", vm_id="uuid-1")
|
||||
|
||||
Reference in New Issue
Block a user