mirror of
https://github.com/jmagar/unraid-mcp.git
synced 2026-03-23 12:39:24 -07:00
Security: - Remove /mnt/ from _ALLOWED_LOG_PREFIXES to prevent Unraid share exposure - Add early .. detection for disk/logs and live/log_tail path validation - Add /boot/ prefix restriction for flash_backup source_path - Use hmac.compare_digest for timing-safe API key verification in server.py - Gate include_traceback on DEBUG log level (no tracebacks in production) Correctness: - Re-raise CredentialsNotConfiguredError in health check instead of swallowing - Fix ups_device query (remove non-existent nominalPower/currentPower fields) Best practices (BP-01, BP-05, BP-06): - Add # noqa: ASYNC109 to timeout params in _handle_live and unraid() - Fix start_array* → start_array in docstring (not in ARRAY_DESTRUCTIVE) - Remove from __future__ import annotations from snapshot.py - Replace import-time UNRAID_API_KEY/URL bindings with _settings.ATTR pattern in manager.py, snapshot.py, utils.py, diagnostics.py — fixes stale binding after apply_runtime_config() post-elicitation (BP-05) CI/CD: - Add .github/workflows/ci.yml (5-job pipeline: lint, typecheck, test, version-sync, audit) - Add fail_under = 80 to [tool.coverage.report] - Add version sync check to scripts/validate-marketplace.sh Documentation: - Sync plugin.json version 1.1.1 → 1.1.2 with pyproject.toml - Update CLAUDE.md: 3 tools, system domain count 18, scripts comment fix - Update README.md: 3 tools, security notes - Update docs/AUTHENTICATION.md: H1 title fix - Add UNRAID_CREDENTIALS_DIR to .env.example Bump: 1.1.1 → 1.1.2 Co-Authored-By: Claude <noreply@anthropic.com>
346 lines
14 KiB
Python
346 lines
14 KiB
Python
"""Tests for disk subactions of the consolidated unraid tool."""
|
|
|
|
from collections.abc import Generator
|
|
from unittest.mock import AsyncMock, patch
|
|
|
|
import pytest
|
|
from conftest import make_tool_fn
|
|
|
|
from unraid_mcp.core.exceptions import ToolError
|
|
from unraid_mcp.core.utils import format_bytes, format_kb, safe_get
|
|
|
|
|
|
# --- Unit tests for helpers ---
|
|
|
|
|
|
class TestFormatBytes:
    """Unit tests for format_bytes: one representative case per size magnitude."""

    def test_none(self) -> None:
        """A missing value renders as the placeholder string."""
        assert format_bytes(None) == "N/A"

    def test_bytes(self) -> None:
        """Values below one kilobyte keep the B suffix."""
        assert format_bytes(512) == "512.00 B"

    def test_kilobytes(self) -> None:
        assert format_bytes(2 * 1024) == "2.00 KB"

    def test_megabytes(self) -> None:
        assert format_bytes(1024**2) == "1.00 MB"

    def test_gigabytes(self) -> None:
        assert format_bytes(1024**3) == "1.00 GB"

    def test_terabytes(self) -> None:
        assert format_bytes(1024**4) == "1.00 TB"
|
|
|
|
|
|
# --- Integration tests ---
|
|
|
|
|
|
@pytest.fixture
def _mock_graphql() -> Generator[AsyncMock, None, None]:
    """Patch ``make_graphql_request`` in the unraid tool module with an AsyncMock.

    Yields the mock so tests can set ``return_value``/``side_effect`` and
    inspect the GraphQL calls the tool makes.
    """
    with patch("unraid_mcp.tools.unraid.make_graphql_request", new_callable=AsyncMock) as mock:
        yield mock
|
|
|
|
|
|
def _make_tool():
    """Register the consolidated ``unraid`` tool and return its callable entry point."""
    return make_tool_fn("unraid_mcp.tools.unraid", "register_unraid_tool", "unraid")
|
|
|
|
|
|
class TestStorageValidation:
    """Input validation for the disk action: required params, path rules, tail limits."""

    async def test_disk_details_requires_disk_id(self, _mock_graphql: AsyncMock) -> None:
        """disk_details without disk_id raises a ToolError naming the parameter."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="disk_id"):
            await tool_fn(action="disk", subaction="disk_details")

    async def test_logs_requires_log_path(self, _mock_graphql: AsyncMock) -> None:
        """logs without log_path raises a ToolError naming the parameter."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="log_path"):
            await tool_fn(action="disk", subaction="logs")

    async def test_logs_rejects_invalid_path(self, _mock_graphql: AsyncMock) -> None:
        """Paths outside the allowed log prefixes (e.g. /etc) are rejected."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="log_path must start with"):
            await tool_fn(action="disk", subaction="logs", log_path="/etc/shadow")

    async def test_logs_rejects_path_traversal(self, _mock_graphql: AsyncMock) -> None:
        """Paths containing '..' are rejected even when the prefix looks allowed."""
        tool_fn = _make_tool()
        # Traversal that escapes /var/log/ — detected by early .. check
        with pytest.raises(ToolError, match="log_path"):
            await tool_fn(action="disk", subaction="logs", log_path="/var/log/../../etc/shadow")
        # Traversal via .. — detected by early .. check
        with pytest.raises(ToolError, match="log_path"):
            await tool_fn(action="disk", subaction="logs", log_path="/var/log/../etc/passwd")

    async def test_logs_allows_valid_paths(self, _mock_graphql: AsyncMock) -> None:
        """A plain /var/log path passes validation and returns the file content."""
        _mock_graphql.return_value = {"logFile": {"path": "/var/log/syslog", "content": "ok"}}
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="logs", log_path="/var/log/syslog")
        assert result["content"] == "ok"

    async def test_logs_tail_lines_too_large(self, _mock_graphql: AsyncMock) -> None:
        """tail_lines just above the 10_000 cap is rejected."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="tail_lines must be between"):
            await tool_fn(
                action="disk", subaction="logs", log_path="/var/log/syslog", tail_lines=10_001
            )

    async def test_logs_tail_lines_zero_rejected(self, _mock_graphql: AsyncMock) -> None:
        """tail_lines of 0 (below the minimum) is rejected."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="tail_lines must be between"):
            await tool_fn(action="disk", subaction="logs", log_path="/var/log/syslog", tail_lines=0)

    async def test_logs_tail_lines_at_max_accepted(self, _mock_graphql: AsyncMock) -> None:
        """tail_lines exactly at the 10_000 cap is accepted — the bound is inclusive."""
        _mock_graphql.return_value = {"logFile": {"path": "/var/log/syslog", "content": "ok"}}
        tool_fn = _make_tool()
        result = await tool_fn(
            action="disk", subaction="logs", log_path="/var/log/syslog", tail_lines=10_000
        )
        assert result["content"] == "ok"

    async def test_non_logs_action_ignores_tail_lines_validation(
        self, _mock_graphql: AsyncMock
    ) -> None:
        """tail_lines is only validated for the logs subaction; shares ignores it."""
        _mock_graphql.return_value = {"shares": []}
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="shares", tail_lines=0)
        assert result["shares"] == []
|
|
|
|
|
|
class TestFormatKb:
    """Unit tests for format_kb, which interprets its numeric input as kilobytes."""

    def test_none_returns_na(self) -> None:
        assert format_kb(None) == "N/A"

    def test_invalid_string_returns_na(self) -> None:
        """Non-numeric input falls back to the placeholder rather than raising."""
        assert format_kb("not-a-number") == "N/A"

    def test_kilobytes_range(self) -> None:
        assert format_kb(512) == "512.00 KB"

    def test_megabytes_range(self) -> None:
        assert format_kb(2 * 1024) == "2.00 MB"

    def test_gigabytes_range(self) -> None:
        assert format_kb(1024**2) == "1.00 GB"

    def test_terabytes_range(self) -> None:
        assert format_kb(1024**3) == "1.00 TB"

    def test_boundary_exactly_1024_kb(self) -> None:
        # The boundary value rolls over to the next unit: 1024 KB == 1 MB.
        assert format_kb(1024) == "1.00 MB"
|
|
|
|
|
|
class TestSafeGet:
    """Unit tests for safe_get: nested dict traversal with a default fallback."""

    def test_simple_key_access(self) -> None:
        data = {"a": 1}
        assert safe_get(data, "a") == 1

    def test_nested_key_access(self) -> None:
        data = {"a": {"b": "val"}}
        assert safe_get(data, "a", "b") == "val"

    def test_missing_key_returns_none(self) -> None:
        assert safe_get({"a": 1}, "missing") is None

    def test_none_intermediate_returns_default(self) -> None:
        # Descending through a None intermediate falls back to the default (None here).
        assert safe_get({"a": None}, "a", "b") is None

    def test_custom_default_returned(self) -> None:
        assert safe_get({}, "x", default="fallback") == "fallback"

    def test_non_dict_intermediate_returns_default(self) -> None:
        # A non-dict intermediate ("string") cannot be descended into further.
        assert safe_get({"a": "string"}, "a", "b") is None

    def test_empty_list_default(self) -> None:
        assert safe_get({}, "missing", default=[]) == []

    def test_zero_value_not_replaced_by_default(self) -> None:
        # Falsy-but-present values must win over the default.
        assert safe_get({"temp": 0}, "temp", default="N/A") == 0

    def test_false_value_not_replaced_by_default(self) -> None:
        assert safe_get({"active": False}, "active", default=True) is False

    def test_empty_string_not_replaced_by_default(self) -> None:
        assert safe_get({"name": ""}, "name", default="unknown") == ""
|
|
|
|
|
|
class TestStorageActions:
    """Happy-path (and not-found) behavior of the disk subactions against mocked GraphQL."""

    async def test_shares(self, _mock_graphql: AsyncMock) -> None:
        """shares returns every share from the GraphQL response."""
        _mock_graphql.return_value = {
            "shares": [{"id": "s:1", "name": "media"}, {"id": "s:2", "name": "backups"}]
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="shares")
        assert len(result["shares"]) == 2

    async def test_disks(self, _mock_graphql: AsyncMock) -> None:
        """disks returns every disk from the GraphQL response."""
        _mock_graphql.return_value = {"disks": [{"id": "d:1", "device": "sda"}]}
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="disks")
        assert len(result["disks"]) == 1

    async def test_disk_details(self, _mock_graphql: AsyncMock) -> None:
        """disk_details summarizes temperature (°C suffix) and human-readable size."""
        _mock_graphql.return_value = {
            "disk": {
                "id": "d:1",
                "device": "sda",
                "name": "WD",
                "serialNum": "SN1",
                "size": 1073741824,
                "temperature": 35,
            }
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="disk_details", disk_id="d:1")
        assert result["summary"]["temperature"] == "35\u00b0C"
        assert "1.00 GB" in result["summary"]["size_formatted"]

    async def test_disk_details_temperature_zero(self, _mock_graphql: AsyncMock) -> None:
        """Temperature of 0 should display as '0\u00b0C', not 'N/A'."""
        _mock_graphql.return_value = {
            "disk": {
                "id": "d:1",
                "device": "sda",
                "name": "WD",
                "serialNum": "SN1",
                "size": 1073741824,
                "temperature": 0,
            }
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="disk_details", disk_id="d:1")
        assert result["summary"]["temperature"] == "0\u00b0C"

    async def test_disk_details_temperature_null(self, _mock_graphql: AsyncMock) -> None:
        """Null temperature should display as 'N/A'."""
        _mock_graphql.return_value = {
            "disk": {
                "id": "d:1",
                "device": "sda",
                "name": "WD",
                "serialNum": "SN1",
                "size": 1073741824,
                "temperature": None,
            }
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="disk_details", disk_id="d:1")
        assert result["summary"]["temperature"] == "N/A"

    async def test_logs_null_log_file(self, _mock_graphql: AsyncMock) -> None:
        """logFile being null should return an empty dict."""
        _mock_graphql.return_value = {"logFile": None}
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="logs", log_path="/var/log/syslog")
        assert result == {}

    async def test_disk_details_not_found(self, _mock_graphql: AsyncMock) -> None:
        """A null disk in the response raises a 'not found' ToolError."""
        _mock_graphql.return_value = {"disk": None}
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="not found"):
            await tool_fn(action="disk", subaction="disk_details", disk_id="d:missing")

    async def test_log_files(self, _mock_graphql: AsyncMock) -> None:
        """log_files lists the available log files from the response."""
        _mock_graphql.return_value = {"logFiles": [{"name": "syslog", "path": "/var/log/syslog"}]}
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="log_files")
        assert len(result["log_files"]) == 1

    async def test_logs(self, _mock_graphql: AsyncMock) -> None:
        """logs returns the logFile content from the response."""
        _mock_graphql.return_value = {
            "logFile": {"path": "/var/log/syslog", "content": "log line", "totalLines": 1}
        }
        tool_fn = _make_tool()
        result = await tool_fn(action="disk", subaction="logs", log_path="/var/log/syslog")
        assert result["content"] == "log line"
|
|
|
|
|
|
class TestStorageNetworkErrors:
    """Tests for network-level failures in storage operations."""

    async def test_logs_json_decode_error(self, _mock_graphql: AsyncMock) -> None:
        """Invalid JSON response when reading logs should propagate as ToolError."""
        failure = ToolError(
            "Invalid JSON response from Unraid API: Expecting value: line 1 column 1"
        )
        _mock_graphql.side_effect = failure
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="Invalid JSON"):
            await tool_fn(action="disk", subaction="logs", log_path="/var/log/syslog")

    async def test_shares_connection_refused(self, _mock_graphql: AsyncMock) -> None:
        """Connection refused when listing shares should propagate as ToolError."""
        failure = ToolError("Network connection error: [Errno 111] Connection refused")
        _mock_graphql.side_effect = failure
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="Connection refused"):
            await tool_fn(action="disk", subaction="shares")

    async def test_disks_http_500(self, _mock_graphql: AsyncMock) -> None:
        """HTTP 500 when listing disks should propagate as ToolError."""
        failure = ToolError("HTTP error 500: Internal Server Error")
        _mock_graphql.side_effect = failure
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="HTTP error 500"):
            await tool_fn(action="disk", subaction="disks")
|
|
|
|
|
|
class TestStorageFlashBackup:
    """Validation and success paths for the destructive flash_backup subaction."""

    async def test_flash_backup_requires_confirm(self, _mock_graphql: AsyncMock) -> None:
        """Without confirm=True the backup is refused even with all other params set."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="not confirmed"):
            await tool_fn(
                action="disk",
                subaction="flash_backup",
                remote_name="r",
                source_path="/boot",
                destination_path="r:b",
            )

    async def test_flash_backup_requires_remote_name(self, _mock_graphql: AsyncMock) -> None:
        """Missing remote_name raises a ToolError naming the parameter."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="remote_name"):
            await tool_fn(action="disk", subaction="flash_backup", confirm=True)

    async def test_flash_backup_requires_source_path(self, _mock_graphql: AsyncMock) -> None:
        """Missing source_path raises a ToolError naming the parameter."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="source_path"):
            await tool_fn(action="disk", subaction="flash_backup", confirm=True, remote_name="r")

    async def test_flash_backup_requires_destination_path(self, _mock_graphql: AsyncMock) -> None:
        """Missing destination_path raises a ToolError naming the parameter."""
        tool_fn = _make_tool()
        with pytest.raises(ToolError, match="destination_path"):
            await tool_fn(
                action="disk",
                subaction="flash_backup",
                confirm=True,
                remote_name="r",
                source_path="/boot",
            )

    async def test_flash_backup_success(self, _mock_graphql: AsyncMock) -> None:
        """With confirm and all params, the mutation result is wrapped in success/data."""
        _mock_graphql.return_value = {"initiateFlashBackup": {"status": "started", "jobId": "j:1"}}
        tool_fn = _make_tool()
        result = await tool_fn(
            action="disk",
            subaction="flash_backup",
            confirm=True,
            remote_name="r",
            source_path="/boot",
            destination_path="r:b",
        )
        assert result["success"] is True
        assert result["data"]["status"] == "started"

    async def test_flash_backup_passes_options(self, _mock_graphql: AsyncMock) -> None:
        """backup_options are forwarded verbatim as the mutation's input.options."""
        _mock_graphql.return_value = {"initiateFlashBackup": {"status": "started", "jobId": "j:2"}}
        tool_fn = _make_tool()
        await tool_fn(
            action="disk",
            subaction="flash_backup",
            confirm=True,
            remote_name="r",
            source_path="/boot",
            destination_path="r:b",
            backup_options={"dryRun": True},
        )
        # Second positional arg of make_graphql_request is the GraphQL variables dict.
        assert _mock_graphql.call_args[0][1]["input"]["options"] == {"dryRun": True}
|