Files
unraid-mcp/tests/test_health.py
Jacob Magar 2697c269a3 chore: enhance project metadata, tooling, and documentation
**Project Configuration:**
- Enhance pyproject.toml with comprehensive metadata, keywords, and classifiers
- Add LICENSE file (MIT) for proper open-source distribution
- Add PUBLISHING.md with comprehensive publishing guidelines
- Update .gitignore to exclude tool artifacts (.cache, .pytest_cache, .ruff_cache, .ty_cache)
- Ignore documentation working directories (.docs, .full-review, docs/plans, docs/sessions)

**Documentation:**
- Add extensive Unraid API research documentation
  - API source code analysis and resolver mapping
  - Competitive analysis and feature gap assessment
  - Release notes analysis (7.0.0, 7.1.0, 7.2.0)
  - Connect platform overview and remote access documentation
- Document known API patterns, limitations, and edge cases

**Testing & Code Quality:**
- Expand test coverage across all tool modules
- Add destructive action confirmation tests
- Improve test assertions and error case validation
- Refine type annotations for better static analysis

**Tool Improvements:**
- Enhance error handling consistency across all tools
- Improve type safety with explicit type annotations
- Refine GraphQL query construction patterns
- Better handling of optional parameters and edge cases

This commit prepares the project for v0.2.0 release with improved
metadata, comprehensive documentation, and enhanced code quality.

Co-authored-by: Claude <noreply@anthropic.com>
2026-02-15 15:32:09 -05:00

137 lines
5.3 KiB
Python

"""Tests for unraid_health tool."""
from collections.abc import Iterator
from unittest.mock import AsyncMock, patch

import pytest

from conftest import make_tool_fn
from unraid_mcp.core.exceptions import ToolError
@pytest.fixture
def _mock_graphql() -> Iterator[AsyncMock]:
    """Patch ``make_graphql_request`` in the health module and yield the mock.

    The original annotation was ``-> AsyncMock``, but this is a yield (generator)
    fixture, so the correct return type for static analysis is
    ``Iterator[AsyncMock]``. The patch stays active for the duration of the test,
    so the tool under test talks to the AsyncMock instead of a real endpoint.
    """
    with patch(
        "unraid_mcp.tools.health.make_graphql_request", new_callable=AsyncMock
    ) as mock:
        yield mock
def _make_tool():
    """Build and return the registered ``unraid_health`` tool callable.

    Delegates to the shared conftest helper, which imports the module,
    runs its registration hook, and extracts the named tool function.
    """
    return make_tool_fn("unraid_mcp.tools.health", "register_health_tool", "unraid_health")
class TestHealthValidation:
    """Input validation behavior of the unraid_health tool."""

    async def test_invalid_action(self, _mock_graphql: AsyncMock) -> None:
        """An unrecognized action value raises ToolError mentioning 'Invalid action'."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="Invalid action"):
            await tool_fn(action="invalid")
class TestHealthActions:
    """Behavior of the unraid_health tool's supported actions."""

    async def test_test_connection(self, _mock_graphql: AsyncMock) -> None:
        """test_connection reports connected status, the online flag, and latency."""
        _mock_graphql.return_value = {"online": True}
        tool_fn = _make_tool()
        result = await tool_fn(action="test_connection")
        assert result["status"] == "connected"
        assert result["online"] is True
        assert "latency_ms" in result

    async def test_check_healthy(self, _mock_graphql: AsyncMock) -> None:
        """check reports 'healthy' for a started array with no unread alerts/warnings."""
        _mock_graphql.return_value = {
            "info": {
                "machineId": "abc123",
                "time": "2026-02-08T12:00:00Z",
                "versions": {"unraid": "7.2.0"},
                "os": {"uptime": 86400},
            },
            "array": {"state": "STARTED"},
            "notifications": {
                "overview": {"unread": {"alert": 0, "warning": 0, "total": 3}}
            },
            "docker": {
                "containers": [{"id": "c1", "state": "running", "status": "Up 2 days"}]
            },
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        assert result["status"] == "healthy"
        assert "api_latency_ms" in result

    async def test_check_warning_on_alerts(self, _mock_graphql: AsyncMock) -> None:
        """Unread alert notifications downgrade an otherwise-healthy check to 'warning'."""
        _mock_graphql.return_value = {
            "info": {"machineId": "abc", "versions": {"unraid": "7.2"}, "os": {"uptime": 100}},
            "array": {"state": "STARTED"},
            "notifications": {
                "overview": {"unread": {"alert": 3, "warning": 0, "total": 3}}
            },
            "docker": {"containers": []},
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        assert result["status"] == "warning"
        # At least one reported issue string should mention the alerts.
        assert any("alert" in i for i in result.get("issues", []))

    async def test_check_no_data(self, _mock_graphql: AsyncMock) -> None:
        """An empty GraphQL response yields an 'unhealthy' status."""
        _mock_graphql.return_value = {}
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        assert result["status"] == "unhealthy"

    async def test_check_api_error(self, _mock_graphql: AsyncMock) -> None:
        """A request-level exception is surfaced as 'unhealthy' with the error text."""
        _mock_graphql.side_effect = Exception("Connection refused")
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        assert result["status"] == "unhealthy"
        assert "Connection refused" in result["error"]

    async def test_check_severity_never_downgrades(self, _mock_graphql: AsyncMock) -> None:
        """Degraded from missing info should not be overwritten by warning from alerts."""
        _mock_graphql.return_value = {
            "info": {},
            "array": {"state": "STARTED"},
            "notifications": {
                "overview": {"unread": {"alert": 5, "warning": 0, "total": 5}}
            },
            "docker": {"containers": []},
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="check")
        # Missing info escalates to "degraded"; alerts only escalate to "warning"
        # Severity should stay at "degraded" (not downgrade to "warning")
        assert result["status"] == "degraded"

    async def test_diagnose_wraps_exception(self, _mock_graphql: AsyncMock) -> None:
        """When _diagnose_subscriptions raises, tool wraps in ToolError."""
        tool_fn = _make_tool()
        with patch(
            "unraid_mcp.tools.health._diagnose_subscriptions",
            side_effect=RuntimeError("broken"),
        ):
            with pytest.raises(ToolError, match="broken"):
                await tool_fn(action="diagnose")

    async def test_diagnose_success(self, _mock_graphql: AsyncMock) -> None:
        """Diagnose returns subscription status when modules are available."""
        tool_fn = _make_tool()
        mock_status = {
            "cpu_sub": {"runtime": {"connection_state": "connected", "last_error": None}},
        }
        with patch("unraid_mcp.tools.health._diagnose_subscriptions", return_value=mock_status):
            result = await tool_fn(action="diagnose")
        assert "cpu_sub" in result

    async def test_diagnose_import_error_internal(self) -> None:
        """_diagnose_subscriptions catches ImportError and returns error dict."""
        import builtins

        from unraid_mcp.tools.health import _diagnose_subscriptions

        real_import = builtins.__import__

        # Fail only imports whose module path mentions "subscriptions",
        # delegating everything else to the real importer.
        def fail_subscriptions(name, *args, **kwargs):
            if "subscriptions" in name:
                raise ImportError("no module")
            return real_import(name, *args, **kwargs)

        with patch("builtins.__import__", side_effect=fail_subscriptions):
            result = await _diagnose_subscriptions()
        assert "error" in result