mirror of https://github.com/jmagar/unraid-mcp.git
synced 2026-03-23 12:39:24 -07:00
_QueryCache.get/put/invalidate_all are async (they use an asyncio.Lock internally). Updated 6 sync test methods to async def with proper await calls so they exercise the actual async interface rather than calling unawaited coroutines.
721 lines · 28 KiB · Python
"""Tests for unraid_mcp.core.client — GraphQL client infrastructure."""
|
|
|
|
import json
|
|
import time
|
|
from unittest.mock import AsyncMock, MagicMock, patch
|
|
|
|
import httpx
|
|
import pytest
|
|
|
|
from unraid_mcp.core.client import (
|
|
DEFAULT_TIMEOUT,
|
|
DISK_TIMEOUT,
|
|
_QueryCache,
|
|
_RateLimiter,
|
|
is_idempotent_error,
|
|
make_graphql_request,
|
|
redact_sensitive,
|
|
)
|
|
from unraid_mcp.core.exceptions import ToolError
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# is_idempotent_error
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestIsIdempotentError:
    """Exercise every idempotent-error pattern the client recognizes."""

    def test_start_already_started(self) -> None:
        message = "Container already started"
        assert is_idempotent_error(message, "start") is True

    def test_start_container_already_running(self) -> None:
        message = "container already running"
        assert is_idempotent_error(message, "start") is True

    def test_start_http_code_304(self) -> None:
        # Docker reports 304 Not Modified when a start is a no-op.
        message = "HTTP code 304 - not modified"
        assert is_idempotent_error(message, "start") is True

    def test_stop_already_stopped(self) -> None:
        message = "Container already stopped"
        assert is_idempotent_error(message, "stop") is True

    def test_stop_not_running(self) -> None:
        message = "container not running"
        assert is_idempotent_error(message, "stop") is True

    def test_stop_http_code_304(self) -> None:
        message = "HTTP code 304"
        assert is_idempotent_error(message, "stop") is True

    def test_start_unrelated_error(self) -> None:
        # A genuine failure must never be swallowed as idempotent.
        assert is_idempotent_error("permission denied", "start") is False

    def test_stop_unrelated_error(self) -> None:
        assert is_idempotent_error("image not found", "stop") is False

    def test_unknown_operation(self) -> None:
        # Only "start"/"stop" carry idempotent patterns; other ops never match.
        assert is_idempotent_error("already started", "restart") is False

    def test_case_insensitive(self) -> None:
        # Pattern matching must ignore case in the error message.
        assert is_idempotent_error("ALREADY STARTED", "start") is True
        assert is_idempotent_error("ALREADY STOPPED", "stop") is True
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# redact_sensitive
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestRedactSensitive:
    """Confirm sensitive keys are masked recursively at every nesting level."""

    def test_flat_dict(self) -> None:
        payload = {"username": "admin", "password": "hunter2", "host": "10.0.0.1"}
        redacted = redact_sensitive(payload)
        assert redacted["password"] == "***"
        assert redacted["username"] == "admin"
        assert redacted["host"] == "10.0.0.1"

    def test_nested_dict(self) -> None:
        payload = {"config": {"apiKey": "abc123", "url": "http://host"}}
        inner = redact_sensitive(payload)["config"]
        assert inner["apiKey"] == "***"
        assert inner["url"] == "http://host"

    def test_list_of_dicts(self) -> None:
        redacted = redact_sensitive([{"token": "t1"}, {"name": "safe"}])
        assert redacted[0]["token"] == "***"
        assert redacted[1]["name"] == "safe"

    def test_deeply_nested(self) -> None:
        payload = {"a": {"b": {"c": {"secret": "deep"}}}}
        redacted = redact_sensitive(payload)
        assert redacted["a"]["b"]["c"]["secret"] == "***"

    def test_non_dict_passthrough(self) -> None:
        # Scalars pass through unchanged — only containers are walked.
        for scalar in ("plain_string", 42):
            assert redact_sensitive(scalar) == scalar
        assert redact_sensitive(None) is None

    def test_case_insensitive_keys(self) -> None:
        payload = {"Password": "p1", "TOKEN": "t1", "ApiKey": "k1", "Secret": "s1", "Key": "x1"}
        redacted = redact_sensitive(payload)
        assert all(value == "***" for value in redacted.values())

    def test_compound_key_names(self) -> None:
        """Keys containing sensitive substrings (e.g. 'user_password') are redacted."""
        payload = {
            "user_password": "p1",
            "api_key_value": "k1",
            "auth_token_expiry": "t1",
            "client_secret_id": "s1",
            "username": "safe",
            "host": "safe",
        }
        redacted = redact_sensitive(payload)
        masked = {k for k, v in redacted.items() if v == "***"}
        assert masked == {
            "user_password",
            "api_key_value",
            "auth_token_expiry",
            "client_secret_id",
        }
        assert redacted["username"] == "safe"
        assert redacted["host"] == "safe"

    def test_mixed_list_content(self) -> None:
        redacted = redact_sensitive([{"key": "val"}, "string", 123, [{"token": "inner"}]])
        assert redacted[0]["key"] == "***"
        assert redacted[1] == "string"
        assert redacted[2] == 123
        assert redacted[3][0]["token"] == "***"

    def test_new_sensitive_keys_are_redacted(self) -> None:
        """PR-added keys: authorization, cookie, session, credential, passphrase, jwt."""
        payload = {
            "authorization": "Bearer token123",
            "cookie": "session=abc",
            "jwt": "eyJ...",
            "credential": "secret_cred",
            "passphrase": "hunter2",
            "session": "sess_id",
        }
        for key, val in redact_sensitive(payload).items():
            assert val == "***", f"Key '{key}' was not redacted"
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Timeout constants
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestTimeoutConstants:
    """Pin the read/connect values of the shared httpx timeout constants."""

    def test_default_timeout_read(self) -> None:
        expected_read = 30.0
        assert DEFAULT_TIMEOUT.read == expected_read

    def test_default_timeout_connect(self) -> None:
        expected_connect = 5.0
        assert DEFAULT_TIMEOUT.connect == expected_connect

    def test_disk_timeout_read(self) -> None:
        # Disk operations (e.g. SMART scans) get a much longer read window.
        expected_read = 90.0
        assert DISK_TIMEOUT.read == expected_read

    def test_disk_timeout_connect(self) -> None:
        expected_connect = 5.0
        assert DISK_TIMEOUT.connect == expected_connect
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# make_graphql_request — success paths
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestMakeGraphQLRequestSuccess:
    """Happy-path behavior of make_graphql_request."""

    @pytest.fixture(autouse=True)
    def _patch_config(self):
        # Every test needs a configured endpoint and API key.
        with (
            patch("unraid_mcp.core.client.UNRAID_API_URL", "https://unraid.local/graphql"),
            patch("unraid_mcp.core.client.UNRAID_API_KEY", "test-key"),
        ):
            yield

    @staticmethod
    def _client_returning(body) -> AsyncMock:
        """Build a mock HTTP client whose POST yields *body* as the JSON payload."""
        response = MagicMock()
        response.raise_for_status = MagicMock()
        response.json.return_value = body
        client = AsyncMock()
        client.post.return_value = response
        return client

    async def test_simple_query(self) -> None:
        client = self._client_returning({"data": {"info": {"os": "Unraid"}}})
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            result = await make_graphql_request("{ info { os } }")
        assert result == {"info": {"os": "Unraid"}}

    async def test_query_with_variables(self) -> None:
        client = self._client_returning({"data": {"container": {"name": "plex"}}})
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            result = await make_graphql_request(
                "query ($id: String!) { container(id: $id) { name } }",
                variables={"id": "abc123"},
            )
        assert result == {"container": {"name": "plex"}}
        # The variables dict must be forwarded verbatim inside the POST payload.
        sent_payload = client.post.call_args.kwargs["json"]
        assert sent_payload["variables"] == {"id": "abc123"}

    async def test_custom_timeout_passed(self) -> None:
        client = self._client_returning({"data": {}})
        custom_timeout = httpx.Timeout(10.0, read=90.0)
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            await make_graphql_request("{ info }", custom_timeout=custom_timeout)
        # The exact Timeout object must reach the transport call.
        assert client.post.call_args.kwargs["timeout"] is custom_timeout

    async def test_empty_data_returns_empty_dict(self) -> None:
        client = self._client_returning({"data": None})
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            assert await make_graphql_request("{ info }") == {}

    async def test_missing_data_key_returns_empty_dict(self) -> None:
        client = self._client_returning({})
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            assert await make_graphql_request("{ info }") == {}
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# make_graphql_request — error paths
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestMakeGraphQLRequestErrors:
    """Error handling in make_graphql_request: config, HTTP status, network, decoding."""

    @pytest.fixture(autouse=True)
    def _patch_config(self):
        # Baseline valid config; individual tests blank out URL/key as needed.
        with (
            patch("unraid_mcp.core.client.UNRAID_API_URL", "https://unraid.local/graphql"),
            patch("unraid_mcp.core.client.UNRAID_API_KEY", "test-key"),
        ):
            yield

    @staticmethod
    def _client_with_http_error(status_code: int, reason: str) -> AsyncMock:
        """Mock client whose response raises HTTPStatusError for *status_code*."""
        response = MagicMock()
        response.status_code = status_code
        response.text = reason
        response.raise_for_status.side_effect = httpx.HTTPStatusError(
            f"{status_code} {reason}", request=MagicMock(), response=response
        )
        client = AsyncMock()
        client.post.return_value = response
        return client

    async def test_missing_api_url(self) -> None:
        with (
            patch("unraid_mcp.core.client.UNRAID_API_URL", ""),
            pytest.raises(ToolError, match="UNRAID_API_URL not configured"),
        ):
            await make_graphql_request("{ info }")

    async def test_missing_api_key(self) -> None:
        with (
            patch("unraid_mcp.core.client.UNRAID_API_KEY", ""),
            pytest.raises(ToolError, match="UNRAID_API_KEY not configured"),
        ):
            await make_graphql_request("{ info }")

    async def test_http_401_error(self) -> None:
        client = self._client_with_http_error(401, "Unauthorized")
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="Unraid API returned HTTP 401"),
        ):
            await make_graphql_request("{ info }")

    async def test_http_500_error(self) -> None:
        client = self._client_with_http_error(500, "Internal Server Error")
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="Unraid API returned HTTP 500"),
        ):
            await make_graphql_request("{ info }")

    async def test_http_503_error(self) -> None:
        client = self._client_with_http_error(503, "Service Unavailable")
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="Unraid API returned HTTP 503"),
        ):
            await make_graphql_request("{ info }")

    async def test_network_connection_refused(self) -> None:
        client = AsyncMock()
        client.post.side_effect = httpx.ConnectError("Connection refused")
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="Network error connecting to Unraid API"),
        ):
            await make_graphql_request("{ info }")

    async def test_network_timeout(self) -> None:
        client = AsyncMock()
        client.post.side_effect = httpx.ReadTimeout("Read timed out")
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="Network error connecting to Unraid API"),
        ):
            await make_graphql_request("{ info }")

    async def test_json_decode_error(self) -> None:
        # A 200 response whose body is not JSON must surface a clear ToolError.
        response = MagicMock()
        response.raise_for_status = MagicMock()
        response.json.side_effect = json.JSONDecodeError("Expecting value", "", 0)
        client = AsyncMock()
        client.post.return_value = response
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match=r"invalid response.*not valid JSON"),
        ):
            await make_graphql_request("{ info }")
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# make_graphql_request — GraphQL error handling
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestGraphQLErrorHandling:
    """GraphQL-level error parsing, joining, and idempotent short-circuiting."""

    @pytest.fixture(autouse=True)
    def _patch_config(self):
        with (
            patch("unraid_mcp.core.client.UNRAID_API_URL", "https://unraid.local/graphql"),
            patch("unraid_mcp.core.client.UNRAID_API_KEY", "test-key"),
        ):
            yield

    @staticmethod
    def _client_with_body(body) -> AsyncMock:
        """Mock HTTP client returning a 200-style response with *body* as JSON."""
        response = MagicMock()
        response.raise_for_status = MagicMock()
        response.json.return_value = body
        client = AsyncMock()
        client.post.return_value = response
        return client

    async def test_graphql_error_raises_tool_error(self) -> None:
        client = self._client_with_body({"errors": [{"message": "Field 'bogus' not found"}]})
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="Field 'bogus' not found"),
        ):
            await make_graphql_request("{ bogus }")

    async def test_multiple_graphql_errors_joined(self) -> None:
        body = {"errors": [{"message": "Error one"}, {"message": "Error two"}]}
        client = self._client_with_body(body)
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="Error one; Error two"),
        ):
            await make_graphql_request("{ info }")

    async def test_idempotent_start_returns_success(self) -> None:
        client = self._client_with_body({"errors": [{"message": "Container already running"}]})
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            result = await make_graphql_request(
                'mutation { docker { start(id: "x") } }',
                operation_context={"operation": "start"},
            )
        # "already running" during a start is reported as success, not failure.
        assert result["idempotent_success"] is True
        assert result["operation"] == "start"

    async def test_idempotent_stop_returns_success(self) -> None:
        client = self._client_with_body({"errors": [{"message": "Container not running"}]})
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            result = await make_graphql_request(
                'mutation { docker { stop(id: "x") } }',
                operation_context={"operation": "stop"},
            )
        assert result["idempotent_success"] is True
        assert result["operation"] == "stop"

    async def test_non_idempotent_error_with_context_raises(self) -> None:
        """An error that doesn't match idempotent patterns still raises even with context."""
        client = self._client_with_body({"errors": [{"message": "Permission denied"}]})
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="Permission denied"),
        ):
            await make_graphql_request(
                'mutation { docker { start(id: "x") } }',
                operation_context={"operation": "start"},
            )

    async def test_graphql_error_without_message_key(self) -> None:
        """Error objects without a 'message' key fall back to str()."""
        body = {"errors": [{"code": "UNKNOWN", "detail": "something broke"}]}
        client = self._client_with_body(body)
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="GraphQL API error"),
        ):
            await make_graphql_request("{ info }")
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# _RateLimiter
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestRateLimiter:
    """Unit tests for the token-bucket rate limiter."""

    async def test_acquire_consumes_one_token(self) -> None:
        bucket = _RateLimiter(max_tokens=10, refill_rate=1.0)
        before = bucket.tokens
        await bucket.acquire()
        # Exactly one token spent (approx: _refill adds a sliver of time credit).
        assert bucket.tokens == pytest.approx(before - 1, abs=1e-3)

    async def test_acquire_succeeds_when_tokens_available(self) -> None:
        bucket = _RateLimiter(max_tokens=5, refill_rate=1.0)
        for _ in range(5):
            await bucket.acquire()  # should never need to sleep
        # _refill() runs during each acquire() call and adds a tiny time-based
        # amount; check < 1.0 (not enough for another immediate request) rather
        # than == 0.0 to avoid flakiness from timing.
        assert bucket.tokens < 1.0

    async def test_tokens_do_not_exceed_max(self) -> None:
        bucket = _RateLimiter(max_tokens=10, refill_rate=1.0)
        # Pretend the last refill was long ago so the bucket would overflow.
        bucket.last_refill = time.monotonic() - 100.0
        bucket._refill()
        assert bucket.tokens == 10.0  # capped at max_tokens

    async def test_refill_adds_tokens_based_on_elapsed(self) -> None:
        bucket = _RateLimiter(max_tokens=100, refill_rate=10.0)
        bucket.tokens = 0.0
        bucket.last_refill = time.monotonic() - 1.0  # one second of credit
        bucket._refill()
        # ~10 tokens expected (10.0/sec * 1.0 sec), with slack for timing jitter.
        assert 9.5 < bucket.tokens < 10.5

    async def test_acquire_sleeps_when_no_tokens(self) -> None:
        """When tokens are exhausted, acquire should sleep before consuming."""
        bucket = _RateLimiter(max_tokens=1, refill_rate=1.0)
        bucket.tokens = 0.0
        recorded_sleeps: list[float] = []

        async def fake_sleep(duration: float) -> None:
            recorded_sleeps.append(duration)
            # Simulate refill by advancing last_refill so tokens replenish.
            bucket.tokens = 1.0
            bucket.last_refill = time.monotonic()

        with patch("unraid_mcp.core.client.asyncio.sleep", side_effect=fake_sleep):
            await bucket.acquire()

        assert len(recorded_sleeps) == 1
        assert recorded_sleeps[0] > 0

    async def test_default_params_match_api_limits(self) -> None:
        """Default rate limiter must use 90 tokens at 9.0/sec (10% headroom from 100/10s)."""
        bucket = _RateLimiter()
        assert (bucket.max_tokens, bucket.refill_rate) == (90, 9.0)
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# _QueryCache
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestQueryCache:
    """Unit tests for the TTL query cache."""

    async def test_miss_on_empty_cache(self) -> None:
        assert await _QueryCache().get("{ info }", None) is None

    async def test_put_and_get_hit(self) -> None:
        qc = _QueryCache()
        payload = {"result": "ok"}
        await qc.put("GetNetworkConfig { }", None, payload)
        assert await qc.get("GetNetworkConfig { }", None) == payload

    async def test_expired_entry_returns_none(self) -> None:
        qc = _QueryCache()
        payload = {"result": "ok"}
        await qc.put("GetNetworkConfig { }", None, payload)
        # Rewrite the stored expiry so the entry is already one second stale.
        key = qc._cache_key("GetNetworkConfig { }", None)
        qc._store[key] = (time.monotonic() - 1.0, payload)
        assert await qc.get("GetNetworkConfig { }", None) is None

    async def test_invalidate_all_clears_store(self) -> None:
        qc = _QueryCache()
        await qc.put("GetNetworkConfig { }", None, {"x": 1})
        await qc.put("GetOwner { }", None, {"y": 2})
        assert len(qc._store) == 2
        await qc.invalidate_all()
        assert len(qc._store) == 0

    async def test_variables_affect_cache_key(self) -> None:
        """Different variables produce different cache keys."""
        qc = _QueryCache()
        q = "GetNetworkConfig($id: ID!) { network(id: $id) { name } }"
        await qc.put(q, {"id": "1"}, {"name": "eth0"})
        await qc.put(q, {"id": "2"}, {"name": "eth1"})
        assert await qc.get(q, {"id": "1"}) == {"name": "eth0"}
        assert await qc.get(q, {"id": "2"}) == {"name": "eth1"}

    def test_is_cacheable_returns_true_for_known_prefixes(self) -> None:
        for query in (
            "GetNetworkConfig { ... }",
            "GetRegistrationInfo { ... }",
            "GetOwner { ... }",
            "GetFlash { ... }",
        ):
            assert _QueryCache.is_cacheable(query) is True

    def test_is_cacheable_returns_false_for_mutations(self) -> None:
        assert _QueryCache.is_cacheable('mutation { docker { start(id: "x") } }') is False

    def test_is_cacheable_returns_false_for_unlisted_queries(self) -> None:
        for query in ("{ docker { containers { id } } }", "{ info { os } }"):
            assert _QueryCache.is_cacheable(query) is False

    def test_is_cacheable_mutation_check_is_prefix(self) -> None:
        """Queries that start with 'mutation' after whitespace are not cacheable."""
        assert _QueryCache.is_cacheable(" mutation { ... }") is False

    def test_is_cacheable_with_explicit_query_keyword(self) -> None:
        """Operation names after explicit 'query' keyword must be recognized."""
        assert _QueryCache.is_cacheable("query GetNetworkConfig { network { name } }") is True
        assert _QueryCache.is_cacheable("query GetOwner { owner { name } }") is True

    def test_is_cacheable_anonymous_query_returns_false(self) -> None:
        """Anonymous 'query { ... }' has no operation name — must not be cached."""
        assert _QueryCache.is_cacheable("query { network { name } }") is False

    async def test_expired_entry_removed_from_store(self) -> None:
        """Accessing an expired entry should remove it from the internal store."""
        qc = _QueryCache()
        await qc.put("GetOwner { }", None, {"owner": "root"})
        key = qc._cache_key("GetOwner { }", None)
        qc._store[key] = (time.monotonic() - 1.0, {"owner": "root"})
        assert key in qc._store
        await qc.get("GetOwner { }", None)  # stale lookup evicts the entry
        assert key not in qc._store
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# make_graphql_request — 429 retry behavior
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class TestRateLimitRetry:
    """Tests for the 429 retry loop in make_graphql_request."""

    @pytest.fixture(autouse=True)
    def _patch_config(self):
        # Patch config and neutralize the backoff sleep so retries are instant.
        with (
            patch("unraid_mcp.core.client.UNRAID_API_URL", "https://unraid.local/graphql"),
            patch("unraid_mcp.core.client.UNRAID_API_KEY", "test-key"),
            patch("unraid_mcp.core.client.asyncio.sleep", new_callable=AsyncMock),
        ):
            yield

    @staticmethod
    def _rate_limited() -> MagicMock:
        """A mock response carrying HTTP 429 (rate limited)."""
        response = MagicMock()
        response.status_code = 429
        response.raise_for_status = MagicMock()
        return response

    @staticmethod
    def _success(data: dict) -> MagicMock:
        """A mock 200 response whose JSON body wraps *data*."""
        response = MagicMock()
        response.status_code = 200
        response.raise_for_status = MagicMock()
        response.json.return_value = {"data": data}
        return response

    async def test_single_429_then_success_retries(self) -> None:
        """One 429 followed by a success should return the data."""
        client = AsyncMock()
        client.post.side_effect = [
            self._rate_limited(),
            self._success({"info": {"os": "Unraid"}}),
        ]
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            result = await make_graphql_request("{ info { os } }")
        assert result == {"info": {"os": "Unraid"}}
        assert client.post.call_count == 2

    async def test_two_429s_then_success(self) -> None:
        """Two 429s followed by success returns data after 2 retries."""
        client = AsyncMock()
        client.post.side_effect = [
            self._rate_limited(),
            self._rate_limited(),
            self._success({"x": 1}),
        ]
        with patch("unraid_mcp.core.client.get_http_client", return_value=client):
            result = await make_graphql_request("{ x }")
        assert result == {"x": 1}
        assert client.post.call_count == 3

    async def test_three_429s_raises_tool_error(self) -> None:
        """Three consecutive 429s (all retries exhausted) raises ToolError."""
        client = AsyncMock()
        client.post.side_effect = [self._rate_limited() for _ in range(3)]
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="rate limiting"),
        ):
            await make_graphql_request("{ info }")

    async def test_rate_limit_error_message_advises_wait(self) -> None:
        """The ToolError message should tell the user to wait ~10 seconds."""
        client = AsyncMock()
        client.post.side_effect = [self._rate_limited() for _ in range(3)]
        with (
            patch("unraid_mcp.core.client.get_http_client", return_value=client),
            pytest.raises(ToolError, match="10 seconds"),
        ):
            await make_graphql_request("{ info }")
|