Compare commits
12 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| c147d8be78 | |||
| 9ffaa18efe | |||
| d53f3fe207 | |||
| 4f1d757dd8 | |||
| ac75cc2e3a | |||
| f7f00d4e14 | |||
| 1c539d5f60 | |||
| 64fcd2967c | |||
| 4d050ff527 | |||
| 1944e2a9cf | |||
| 7e4066c609 | |||
| 4eeec5d808 |
@@ -12,7 +12,10 @@ WORKDIR /app
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
curl \
|
||||
git \
|
||||
&& update-ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install dependencies
|
||||
|
||||
66
HISTORY.md
66
HISTORY.md
@@ -5,10 +5,76 @@ Changelog
|
||||
(unreleased)
|
||||
------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Project association improvements, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
|
||||
0.9.10 (2026-04-11)
|
||||
-------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- More git integration fixes, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.9.9 (2026-04-11)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Add missing git binary, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.9.8 (2026-04-11)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- More file change fixes, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.9.7 (2026-04-11)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- More file generation improvements, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.9.6 (2026-04-11)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Repo onboarding fix, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.9.5 (2026-04-11)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Better code generation, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.9.4 (2026-04-11)
|
||||
------------------
|
||||
|
||||
@@ -1 +1 @@
|
||||
0.9.5
|
||||
0.9.11
|
||||
|
||||
@@ -2272,6 +2272,7 @@ class DatabaseManager:
|
||||
"timeline": [],
|
||||
"issues": [],
|
||||
"issue_work": [],
|
||||
"ui_data": {},
|
||||
}
|
||||
|
||||
# Get logs
|
||||
@@ -2296,6 +2297,7 @@ class DatabaseManager:
|
||||
llm_traces = self.get_llm_traces(project_id=project_id)
|
||||
correlations = self.get_prompt_change_correlations(project_id=project_id)
|
||||
code_changes, local_only_code_changes, orphan_code_changes = self._partition_code_changes(raw_code_changes, commits)
|
||||
ui_data = self._get_latest_ui_snapshot_data(history.id)
|
||||
repository = self._get_project_repository(history)
|
||||
timeline = self.get_project_timeline(project_id=project_id)
|
||||
repository_sync = self.get_repository_sync_status(project_id=project_id)
|
||||
@@ -2395,6 +2397,7 @@ class DatabaseManager:
|
||||
"repository_sync": repository_sync,
|
||||
"issues": issues,
|
||||
"issue_work": issue_work,
|
||||
"ui_data": ui_data,
|
||||
}
|
||||
|
||||
def get_prompt_events(self, project_id: str | None = None, limit: int = 100) -> list[dict]:
|
||||
|
||||
@@ -58,6 +58,18 @@ class GiteaAPI:
|
||||
"""Build a Gitea API URL from a relative path."""
|
||||
return f"{self.base_url}/api/v1/{path.lstrip('/')}"
|
||||
|
||||
def _normalize_pull_request_head(self, head: str | None, owner: str | None = None) -> str | None:
|
||||
"""Return a Gitea-compatible head ref for pull request creation."""
|
||||
normalized = (head or '').strip()
|
||||
if not normalized:
|
||||
return None
|
||||
if ':' in normalized:
|
||||
return normalized
|
||||
effective_owner = (owner or self.owner or '').strip()
|
||||
if not effective_owner:
|
||||
return normalized
|
||||
return f"{effective_owner}:{normalized}"
|
||||
|
||||
def build_repo_git_url(self, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||
"""Build the clone URL for a repository."""
|
||||
_owner = owner or self.owner
|
||||
@@ -222,11 +234,12 @@ class GiteaAPI:
|
||||
"""Create a pull request."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
normalized_head = self._normalize_pull_request_head(head, _owner)
|
||||
payload = {
|
||||
"title": title,
|
||||
"body": body,
|
||||
"base": base,
|
||||
"head": head or f"{_owner}-{_repo}-ai-gen-{hash(title) % 10000}",
|
||||
"head": normalized_head or f"{_owner}:{_owner}-{_repo}-ai-gen-{hash(title) % 10000}",
|
||||
}
|
||||
return await self._request("POST", f"repos/{_owner}/{_repo}/pulls", payload)
|
||||
|
||||
@@ -242,11 +255,12 @@ class GiteaAPI:
|
||||
"""Synchronously create a pull request."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
normalized_head = self._normalize_pull_request_head(head, _owner)
|
||||
payload = {
|
||||
"title": title,
|
||||
"body": body,
|
||||
"base": base,
|
||||
"head": head or f"{_owner}-{_repo}-ai-gen-{hash(title) % 10000}",
|
||||
"head": normalized_head or f"{_owner}:{_owner}-{_repo}-ai-gen-{hash(title) % 10000}",
|
||||
}
|
||||
return self._request_sync("POST", f"repos/{_owner}/{_repo}/pulls", payload)
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import json
|
||||
import py_compile
|
||||
import re
|
||||
import subprocess
|
||||
from pathlib import PurePosixPath
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
|
||||
@@ -29,6 +30,13 @@ except ImportError:
|
||||
class AgentOrchestrator:
|
||||
"""Orchestrates the software generation process with full audit trail."""
|
||||
|
||||
REMOTE_READY_REPOSITORY_MODES = {'project', 'onboarded'}
|
||||
REMOTE_READY_REPOSITORY_STATUSES = {'created', 'exists', 'ready', 'onboarded'}
|
||||
GENERATED_TEXT_FILE_SUFFIXES = {'.py', '.md', '.txt', '.toml', '.yaml', '.yml', '.json', '.ini', '.cfg', '.sh', '.html', '.css', '.js', '.ts'}
|
||||
GENERATED_TEXT_FILE_NAMES = {'README', 'README.md', '.gitignore', 'requirements.txt', 'pyproject.toml', 'Dockerfile', 'Containerfile', 'Makefile'}
|
||||
MAX_WORKSPACE_CONTEXT_FILES = 20
|
||||
MAX_WORKSPACE_CONTEXT_CHARS = 24000
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
project_id: str,
|
||||
@@ -80,6 +88,7 @@ class AgentOrchestrator:
|
||||
self.branch_name = self._build_pr_branch_name(project_id)
|
||||
self.active_pull_request = None
|
||||
self._gitea_username: str | None = None
|
||||
existing_repository: dict | None = None
|
||||
hinted_issue_number = (related_issue_hint or {}).get('number') if related_issue_hint else None
|
||||
self.related_issue_number = hinted_issue_number if hinted_issue_number is not None else self._extract_issue_number(prompt_text)
|
||||
self.related_issue: dict | None = DatabaseManager._normalize_issue(related_issue_hint)
|
||||
@@ -110,9 +119,12 @@ class AgentOrchestrator:
|
||||
latest_ui = self.db_manager._get_latest_ui_snapshot_data(self.history.id)
|
||||
repository = latest_ui.get('repository') if isinstance(latest_ui, dict) else None
|
||||
if isinstance(repository, dict) and repository:
|
||||
existing_repository = dict(repository)
|
||||
self.repo_owner = repository.get('owner') or self.repo_owner
|
||||
self.repo_name = repository.get('name') or self.repo_name
|
||||
self.repo_url = repository.get('url') or self.repo_url
|
||||
git_state = latest_ui.get('git') if isinstance(latest_ui.get('git'), dict) else {}
|
||||
self.branch_name = git_state.get('active_branch') or self.branch_name
|
||||
if self.prompt_text:
|
||||
self.prompt_audit = self.db_manager.log_prompt_submission(
|
||||
history_id=self.history.id,
|
||||
@@ -121,6 +133,7 @@ class AgentOrchestrator:
|
||||
features=self.features,
|
||||
tech_stack=self.tech_stack,
|
||||
actor_name=self.prompt_actor,
|
||||
source=self.prompt_actor,
|
||||
related_issue={'number': self.related_issue_number} if self.related_issue_number is not None else None,
|
||||
source_context=self.prompt_source_context,
|
||||
routing=self.prompt_routing,
|
||||
@@ -129,18 +142,26 @@ class AgentOrchestrator:
|
||||
self.ui_manager.ui_data["project_root"] = str(self.project_root)
|
||||
self.ui_manager.ui_data["features"] = list(self.features)
|
||||
self.ui_manager.ui_data["tech_stack"] = list(self.tech_stack)
|
||||
self.ui_manager.ui_data["repository"] = {
|
||||
repository_ui = {
|
||||
"owner": self.repo_owner,
|
||||
"name": self.repo_name,
|
||||
"mode": "project" if settings.use_project_repositories else "shared",
|
||||
"status": "pending" if settings.use_project_repositories else "shared",
|
||||
"provider": "gitea",
|
||||
}
|
||||
if existing_repository:
|
||||
repository_ui.update(existing_repository)
|
||||
self.ui_manager.ui_data["repository"] = repository_ui
|
||||
if self.related_issue:
|
||||
self.ui_manager.ui_data["related_issue"] = self.related_issue
|
||||
if self.active_pull_request:
|
||||
self.ui_manager.ui_data["pull_request"] = self.active_pull_request
|
||||
|
||||
def _repository_supports_remote_delivery(self, repository: dict | None = None) -> bool:
|
||||
"""Return whether repository metadata supports git push and PR delivery."""
|
||||
repo = repository or self.ui_manager.ui_data.get('repository') or {}
|
||||
return repo.get('mode') in self.REMOTE_READY_REPOSITORY_MODES and repo.get('status') in self.REMOTE_READY_REPOSITORY_STATUSES
|
||||
|
||||
def _static_files(self) -> dict[str, str]:
|
||||
"""Files that do not need prompt-specific generation."""
|
||||
return {
|
||||
@@ -225,6 +246,59 @@ class AgentOrchestrator:
|
||||
fallback_used=False,
|
||||
)
|
||||
|
||||
def _is_safe_relative_path(self, path: str) -> bool:
|
||||
"""Return whether a generated file path is safe to write under the project root."""
|
||||
normalized = str(PurePosixPath((path or '').strip()))
|
||||
if not normalized or normalized in {'.', '..'}:
|
||||
return False
|
||||
if normalized.startswith('/') or normalized.startswith('../') or '/../' in normalized:
|
||||
return False
|
||||
if normalized.startswith('.git/'):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _is_supported_generated_text_file(self, path: str) -> bool:
|
||||
"""Return whether the generated path is a supported text artifact."""
|
||||
normalized = PurePosixPath(path)
|
||||
if normalized.name in self.GENERATED_TEXT_FILE_NAMES:
|
||||
return True
|
||||
return normalized.suffix.lower() in self.GENERATED_TEXT_FILE_SUFFIXES
|
||||
|
||||
def _collect_workspace_context(self) -> dict:
|
||||
"""Collect a compact, text-only snapshot of the current project workspace."""
|
||||
if not self.project_root.exists():
|
||||
return {'has_existing_files': False, 'files': []}
|
||||
|
||||
files: list[dict] = []
|
||||
total_chars = 0
|
||||
for path in sorted(self.project_root.rglob('*')):
|
||||
if not path.is_file():
|
||||
continue
|
||||
relative_path = path.relative_to(self.project_root).as_posix()
|
||||
if relative_path == '.gitignore':
|
||||
continue
|
||||
if not self._is_safe_relative_path(relative_path) or not self._is_supported_generated_text_file(relative_path):
|
||||
continue
|
||||
try:
|
||||
content = path.read_text(encoding='utf-8')
|
||||
except (UnicodeDecodeError, OSError):
|
||||
continue
|
||||
remaining_chars = self.MAX_WORKSPACE_CONTEXT_CHARS - total_chars
|
||||
if remaining_chars <= 0:
|
||||
break
|
||||
snippet = content[:remaining_chars]
|
||||
files.append(
|
||||
{
|
||||
'path': relative_path,
|
||||
'content': snippet,
|
||||
'truncated': len(snippet) < len(content),
|
||||
}
|
||||
)
|
||||
total_chars += len(snippet)
|
||||
if len(files) >= self.MAX_WORKSPACE_CONTEXT_FILES:
|
||||
break
|
||||
return {'has_existing_files': bool(files), 'files': files}
|
||||
|
||||
def _parse_generated_files(self, content: str | None) -> dict[str, str]:
|
||||
"""Parse an LLM file bundle response into relative-path/content pairs."""
|
||||
if not content:
|
||||
@@ -233,7 +307,6 @@ class AgentOrchestrator:
|
||||
parsed = json.loads(content)
|
||||
except Exception:
|
||||
return {}
|
||||
allowed_paths = set(self._fallback_generated_files().keys())
|
||||
generated: dict[str, str] = {}
|
||||
if isinstance(parsed, dict) and isinstance(parsed.get('files'), list):
|
||||
for item in parsed['files']:
|
||||
@@ -241,34 +314,68 @@ class AgentOrchestrator:
|
||||
continue
|
||||
path = str(item.get('path') or '').strip()
|
||||
file_content = item.get('content')
|
||||
if path in allowed_paths and isinstance(file_content, str) and file_content.strip():
|
||||
if (
|
||||
self._is_safe_relative_path(path)
|
||||
and self._is_supported_generated_text_file(path)
|
||||
and isinstance(file_content, str)
|
||||
and file_content.strip()
|
||||
):
|
||||
generated[path] = file_content.rstrip() + "\n"
|
||||
elif isinstance(parsed, dict):
|
||||
for path, file_content in parsed.items():
|
||||
if path in allowed_paths and isinstance(file_content, str) and file_content.strip():
|
||||
generated[str(path)] = file_content.rstrip() + "\n"
|
||||
normalized_path = str(path).strip()
|
||||
if (
|
||||
self._is_safe_relative_path(normalized_path)
|
||||
and self._is_supported_generated_text_file(normalized_path)
|
||||
and isinstance(file_content, str)
|
||||
and file_content.strip()
|
||||
):
|
||||
generated[normalized_path] = file_content.rstrip() + "\n"
|
||||
return generated
|
||||
|
||||
async def _generate_prompt_driven_files(self) -> tuple[dict[str, str], dict | None]:
|
||||
async def _generate_prompt_driven_files(self) -> tuple[dict[str, str], dict | None, bool]:
|
||||
"""Use the configured LLM to generate prompt-specific project files."""
|
||||
fallback_files = self._fallback_generated_files()
|
||||
system_prompt = (
|
||||
'You generate small but concrete starter projects. '
|
||||
'Return only JSON. Provide production-like but compact code that directly reflects the user request. '
|
||||
'Include the files README.md, requirements.txt, main.py, and tests/test_app.py. '
|
||||
'Use FastAPI for Python web requests unless the prompt clearly demands something else. '
|
||||
'The test must verify a real behavior from main.py. '
|
||||
'Do not wrap the JSON in markdown fences.'
|
||||
)
|
||||
user_prompt = (
|
||||
f"Project name: {self.project_name}\n"
|
||||
f"Description: {self.description}\n"
|
||||
f"Original prompt: {self.prompt_text or self.description}\n"
|
||||
f"Requested features: {json.dumps(self.features)}\n"
|
||||
f"Preferred tech stack: {json.dumps(self.tech_stack)}\n"
|
||||
f"Related issue: {json.dumps(self.related_issue) if self.related_issue else 'null'}\n\n"
|
||||
"Return JSON shaped as {\"files\": [{\"path\": \"README.md\", \"content\": \"...\"}, ...]}."
|
||||
)
|
||||
workspace_context = self._collect_workspace_context()
|
||||
has_existing_files = bool(workspace_context.get('has_existing_files'))
|
||||
if has_existing_files:
|
||||
system_prompt = (
|
||||
'You modify an existing software repository. '
|
||||
'Return only JSON. Update the smallest necessary set of files to satisfy the new prompt. '
|
||||
'Prefer editing existing files over inventing a new starter app. '
|
||||
'Only return files that should be written. Omit unchanged files. '
|
||||
'Use repository-relative paths and do not wrap the JSON in markdown fences.'
|
||||
)
|
||||
user_prompt = (
|
||||
f"Project name: {self.project_name}\n"
|
||||
f"Description: {self.description}\n"
|
||||
f"Original prompt: {self.prompt_text or self.description}\n"
|
||||
f"Requested features: {json.dumps(self.features)}\n"
|
||||
f"Preferred tech stack: {json.dumps(self.tech_stack)}\n"
|
||||
f"Related issue: {json.dumps(self.related_issue) if self.related_issue else 'null'}\n\n"
|
||||
f"Current workspace snapshot:\n{json.dumps(workspace_context['files'], indent=2)}\n\n"
|
||||
'Return JSON shaped as {"files": [{"path": "relative/path.py", "content": "..."}, ...]}. '
|
||||
'Each file path must be relative to the repository root.'
|
||||
)
|
||||
else:
|
||||
system_prompt = (
|
||||
'You generate small but concrete starter projects. '
|
||||
'Return only JSON. Provide production-like but compact code that directly reflects the user request. '
|
||||
'Include the files README.md, requirements.txt, main.py, and tests/test_app.py. '
|
||||
'Use FastAPI for Python web requests unless the prompt clearly demands something else. '
|
||||
'The test must verify a real behavior from main.py. '
|
||||
'Do not wrap the JSON in markdown fences.'
|
||||
)
|
||||
user_prompt = (
|
||||
f"Project name: {self.project_name}\n"
|
||||
f"Description: {self.description}\n"
|
||||
f"Original prompt: {self.prompt_text or self.description}\n"
|
||||
f"Requested features: {json.dumps(self.features)}\n"
|
||||
f"Preferred tech stack: {json.dumps(self.tech_stack)}\n"
|
||||
f"Related issue: {json.dumps(self.related_issue) if self.related_issue else 'null'}\n\n"
|
||||
'Return JSON shaped as {"files": [{"path": "README.md", "content": "..."}, ...]}. '
|
||||
'At minimum include README.md, requirements.txt, main.py, and tests/test_app.py.'
|
||||
)
|
||||
content, trace = await LLMServiceClient().chat_with_trace(
|
||||
stage='generation_plan',
|
||||
system_prompt=system_prompt,
|
||||
@@ -278,12 +385,35 @@ class AgentOrchestrator:
|
||||
'project_name': self.project_name,
|
||||
'repository': self.ui_manager.ui_data.get('repository'),
|
||||
'related_issue': self.related_issue,
|
||||
'workspace_files': workspace_context.get('files', []),
|
||||
},
|
||||
expect_json=True,
|
||||
)
|
||||
raw_generated_paths = self._extract_raw_generated_paths(content)
|
||||
generated_files = self._parse_generated_files(content)
|
||||
accepted_paths = list(generated_files.keys())
|
||||
rejected_paths = [path for path in raw_generated_paths if path not in accepted_paths]
|
||||
generation_debug = {
|
||||
'raw_paths': raw_generated_paths,
|
||||
'accepted_paths': accepted_paths,
|
||||
'rejected_paths': rejected_paths,
|
||||
'existing_workspace': has_existing_files,
|
||||
}
|
||||
self.ui_manager.ui_data['generation_debug'] = generation_debug
|
||||
self._append_log(
|
||||
'LLM returned file candidates: '
|
||||
f"raw={raw_generated_paths or []}; accepted={accepted_paths or []}; rejected={rejected_paths or []}."
|
||||
)
|
||||
self._log_system_debug(
|
||||
'generation',
|
||||
'LLM file candidates '
|
||||
f"raw={raw_generated_paths or []}; accepted={accepted_paths or []}; rejected={rejected_paths or []}; "
|
||||
f"existing_workspace={has_existing_files}",
|
||||
)
|
||||
if has_existing_files:
|
||||
return generated_files, trace, True
|
||||
merged_files = {**fallback_files, **generated_files}
|
||||
return merged_files, trace
|
||||
return merged_files, trace, False
|
||||
|
||||
async def _sync_issue_context(self) -> None:
|
||||
"""Sync repository issues and resolve a linked issue from the prompt when present."""
|
||||
@@ -309,6 +439,14 @@ class AgentOrchestrator:
|
||||
self.db_manager.attach_issue_to_prompt(self.prompt_audit.id, self.related_issue)
|
||||
|
||||
async def _ensure_remote_repository(self) -> None:
|
||||
repository = self.ui_manager.ui_data.get("repository") or {}
|
||||
if self._repository_supports_remote_delivery(repository):
|
||||
repository.setdefault("provider", "gitea")
|
||||
repository.setdefault("status", "ready")
|
||||
if repository.get("url"):
|
||||
self.repo_url = repository.get("url")
|
||||
self.ui_manager.ui_data["repository"] = repository
|
||||
return
|
||||
if not settings.use_project_repositories:
|
||||
self.ui_manager.ui_data["repository"]["status"] = "shared"
|
||||
if settings.gitea_repo:
|
||||
@@ -400,9 +538,7 @@ class AgentOrchestrator:
|
||||
async def _push_branch(self, branch: str) -> dict | None:
|
||||
"""Push a branch to the configured project repository when available."""
|
||||
repository = self.ui_manager.ui_data.get('repository') or {}
|
||||
if repository.get('mode') != 'project':
|
||||
return None
|
||||
if repository.get('status') not in {'created', 'exists', 'ready'}:
|
||||
if not self._repository_supports_remote_delivery(repository):
|
||||
return None
|
||||
if not settings.gitea_token or not self.repo_owner or not self.repo_name:
|
||||
return None
|
||||
@@ -449,7 +585,7 @@ class AgentOrchestrator:
|
||||
self.ui_manager.ui_data['pull_request'] = self.active_pull_request
|
||||
return self.active_pull_request
|
||||
repository = self.ui_manager.ui_data.get('repository') or {}
|
||||
if repository.get('mode') != 'project' or repository.get('status') not in {'created', 'exists', 'ready'}:
|
||||
if not self._repository_supports_remote_delivery(repository):
|
||||
return None
|
||||
|
||||
title = f"AI delivery for {self.project_name}"
|
||||
@@ -458,6 +594,16 @@ class AgentOrchestrator:
|
||||
f"Prompt: {self.prompt_text or self.description}\n\n"
|
||||
f"Branch: {self.branch_name}"
|
||||
)
|
||||
pull_request_debug = self.ui_manager.ui_data.setdefault('git', {}).setdefault('pull_request_debug', {})
|
||||
pull_request_request = {
|
||||
'owner': self.repo_owner,
|
||||
'repo': self.repo_name,
|
||||
'title': title,
|
||||
'body': body,
|
||||
'base': 'main',
|
||||
'head': self.gitea_api._normalize_pull_request_head(self.branch_name, self.repo_owner) or self.branch_name,
|
||||
}
|
||||
pull_request_debug['request'] = pull_request_request
|
||||
result = await self.gitea_api.create_pull_request(
|
||||
title=title,
|
||||
body=body,
|
||||
@@ -466,7 +612,9 @@ class AgentOrchestrator:
|
||||
base='main',
|
||||
head=self.branch_name,
|
||||
)
|
||||
pull_request_debug['response'] = result
|
||||
if result.get('error'):
|
||||
pull_request_debug['status'] = 'error'
|
||||
raise RuntimeError(f"Unable to create pull request: {result.get('error')}")
|
||||
|
||||
pr_number = result.get('number') or result.get('id') or 0
|
||||
@@ -481,6 +629,8 @@ class AgentOrchestrator:
|
||||
'merged': bool(result.get('merged')),
|
||||
'pr_state': result.get('state', 'open'),
|
||||
}
|
||||
pull_request_debug['status'] = 'created'
|
||||
pull_request_debug['resolved'] = pr_data
|
||||
if self.db_manager and self.history:
|
||||
self.db_manager.save_pr_data(self.history.id, pr_data)
|
||||
self.active_pull_request = self.db_manager.get_open_pull_request(project_id=self.project_id) if self.db_manager else pr_data
|
||||
@@ -490,9 +640,7 @@ class AgentOrchestrator:
|
||||
async def _push_remote_commit(self, commit_hash: str, commit_message: str, changed_files: list[str], base_commit: str | None) -> dict | None:
|
||||
"""Push the local commit to the provisioned Gitea repository and build browser links."""
|
||||
repository = self.ui_manager.ui_data.get("repository") or {}
|
||||
if repository.get("mode") != "project":
|
||||
return None
|
||||
if repository.get("status") not in {"created", "exists", "ready"}:
|
||||
if not self._repository_supports_remote_delivery(repository):
|
||||
return None
|
||||
push_result = await self._push_branch(self.branch_name)
|
||||
if push_result is None:
|
||||
@@ -534,6 +682,35 @@ class AgentOrchestrator:
|
||||
if self.db_manager and self.history:
|
||||
self.db_manager._log_action(self.history.id, "INFO", message)
|
||||
|
||||
def _log_system_debug(self, component: str, message: str, level: str = 'INFO') -> None:
|
||||
"""Persist a system-level debug breadcrumb for generation and git decisions."""
|
||||
if not self.db_manager:
|
||||
return
|
||||
self.db_manager.log_system_event(component=component, level=level, message=f"{self.project_id}: {message}")
|
||||
|
||||
def _extract_raw_generated_paths(self, content: str | None) -> list[str]:
|
||||
"""Return all file paths proposed by the LLM response before safety filtering."""
|
||||
if not content:
|
||||
return []
|
||||
try:
|
||||
parsed = json.loads(content)
|
||||
except Exception:
|
||||
return []
|
||||
raw_paths: list[str] = []
|
||||
if isinstance(parsed, dict) and isinstance(parsed.get('files'), list):
|
||||
for item in parsed['files']:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
path = str(item.get('path') or '').strip()
|
||||
if path:
|
||||
raw_paths.append(path)
|
||||
elif isinstance(parsed, dict):
|
||||
for path in parsed.keys():
|
||||
normalized_path = str(path).strip()
|
||||
if normalized_path:
|
||||
raw_paths.append(normalized_path)
|
||||
return raw_paths
|
||||
|
||||
def _update_progress(self, progress: int, step: str, message: str) -> None:
|
||||
self.progress = progress
|
||||
self.current_step = step
|
||||
@@ -552,6 +729,8 @@ class AgentOrchestrator:
|
||||
target.parent.mkdir(parents=True, exist_ok=True)
|
||||
change_type = "UPDATE" if target.exists() else "CREATE"
|
||||
previous_content = target.read_text(encoding="utf-8") if target.exists() else ""
|
||||
if previous_content == content:
|
||||
return
|
||||
diff_text = self._build_diff_text(relative_path, previous_content, content)
|
||||
target.write_text(content, encoding="utf-8")
|
||||
self.changed_files.append(relative_path)
|
||||
@@ -660,9 +839,12 @@ class AgentOrchestrator:
|
||||
|
||||
async def _generate_code(self) -> None:
|
||||
"""Generate code using Ollama."""
|
||||
generated_files, trace = await self._generate_prompt_driven_files()
|
||||
change_count_before = len(self.pending_code_changes)
|
||||
generated_files, trace, editing_existing_workspace = await self._generate_prompt_driven_files()
|
||||
for relative_path, content in generated_files.items():
|
||||
self._write_file(relative_path, content)
|
||||
if editing_existing_workspace and len(self.pending_code_changes) == change_count_before:
|
||||
raise RuntimeError('The LLM response did not produce any file changes for the existing project.')
|
||||
fallback_used = bool(trace and trace.get('fallback_used')) or trace is None
|
||||
if self.db_manager and self.history and self.prompt_audit and trace:
|
||||
self.db_manager.log_llm_trace(
|
||||
@@ -692,11 +874,25 @@ class AgentOrchestrator:
|
||||
async def _commit_to_git(self) -> None:
|
||||
"""Commit changes to git."""
|
||||
unique_files = list(dict.fromkeys(self.changed_files))
|
||||
git_debug = self.ui_manager.ui_data.setdefault('git', {})
|
||||
if not unique_files:
|
||||
git_debug.update({
|
||||
'commit_status': 'skipped',
|
||||
'early_exit_reason': 'changed_files_empty',
|
||||
'candidate_files': [],
|
||||
})
|
||||
self._append_log('Git commit skipped: no generated files were marked as changed.')
|
||||
self._log_system_debug('git', 'Commit exited early because changed_files was empty.')
|
||||
return
|
||||
if not self.git_manager.is_git_available():
|
||||
self.ui_manager.ui_data.setdefault('git', {})['error'] = 'git executable is not available in PATH'
|
||||
git_debug.update({
|
||||
'commit_status': 'error',
|
||||
'early_exit_reason': 'git_unavailable',
|
||||
'candidate_files': unique_files,
|
||||
'error': 'git executable is not available in PATH',
|
||||
})
|
||||
self._append_log('Git commit skipped: git executable is not available in PATH')
|
||||
self._log_system_debug('git', 'Commit exited early because git is unavailable.', level='ERROR')
|
||||
return
|
||||
|
||||
try:
|
||||
@@ -704,7 +900,23 @@ class AgentOrchestrator:
|
||||
self.git_manager.init_repo()
|
||||
base_commit = self.git_manager.current_head_or_none()
|
||||
self.git_manager.add_files(unique_files)
|
||||
if not self.git_manager.get_status():
|
||||
status_after_add = self.git_manager.get_status()
|
||||
if not status_after_add:
|
||||
git_debug.update({
|
||||
'commit_status': 'skipped',
|
||||
'early_exit_reason': 'clean_after_staging',
|
||||
'candidate_files': unique_files,
|
||||
'status_after_add': '',
|
||||
})
|
||||
self._append_log(
|
||||
'Git commit skipped: working tree was clean after staging candidate files '
|
||||
f'{unique_files}. No repository diff was created.'
|
||||
)
|
||||
self._log_system_debug(
|
||||
'git',
|
||||
'Commit exited early because git status was clean after staging '
|
||||
f'files={unique_files}',
|
||||
)
|
||||
return
|
||||
|
||||
commit_message = f"AI generation for prompt: {self.project_name}"
|
||||
@@ -717,11 +929,17 @@ class AgentOrchestrator:
|
||||
"scope": "local",
|
||||
"branch": self.branch_name,
|
||||
}
|
||||
git_debug.update({
|
||||
'commit_status': 'committed',
|
||||
'early_exit_reason': None,
|
||||
'candidate_files': unique_files,
|
||||
'status_after_add': status_after_add,
|
||||
})
|
||||
remote_record = None
|
||||
try:
|
||||
remote_record = await self._push_remote_commit(commit_hash, commit_message, unique_files, base_commit)
|
||||
except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as remote_exc:
|
||||
self.ui_manager.ui_data.setdefault("git", {})["remote_error"] = str(remote_exc)
|
||||
git_debug["remote_error"] = str(remote_exc)
|
||||
self._append_log(f"Remote git push skipped: {remote_exc}")
|
||||
|
||||
if remote_record:
|
||||
@@ -731,8 +949,8 @@ class AgentOrchestrator:
|
||||
if remote_record.get('pull_request'):
|
||||
commit_record['pull_request'] = remote_record['pull_request']
|
||||
self.ui_manager.ui_data['pull_request'] = remote_record['pull_request']
|
||||
self.ui_manager.ui_data.setdefault("git", {})["latest_commit"] = commit_record
|
||||
self.ui_manager.ui_data.setdefault("git", {})["commits"] = [commit_record]
|
||||
git_debug["latest_commit"] = commit_record
|
||||
git_debug["commits"] = [commit_record]
|
||||
self._append_log(f"Recorded git commit {commit_hash[:12]} for generated files.")
|
||||
if self.db_manager:
|
||||
self.db_manager.log_commit(
|
||||
@@ -778,7 +996,12 @@ class AgentOrchestrator:
|
||||
commit_url=remote_record.get('commit_url') if remote_record else None,
|
||||
)
|
||||
except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as exc:
|
||||
self.ui_manager.ui_data.setdefault("git", {})["error"] = str(exc)
|
||||
git_debug.update({
|
||||
'commit_status': 'error',
|
||||
'early_exit_reason': 'commit_exception',
|
||||
'candidate_files': unique_files,
|
||||
'error': str(exc),
|
||||
})
|
||||
self._append_log(f"Git commit skipped: {exc}")
|
||||
|
||||
async def _create_pr(self) -> None:
|
||||
|
||||
@@ -31,6 +31,11 @@ class RequestInterpreter:
|
||||
PLACEHOLDER_PROJECT_NAME_WORDS = {
|
||||
'generated project', 'new project', 'project', 'temporary name', 'temp name', 'placeholder', 'untitled project',
|
||||
}
|
||||
ROUTING_STOPWORDS = REPO_NOISE_WORDS | GENERIC_PROJECT_NAME_WORDS | {
|
||||
'about', 'after', 'again', 'appropriate', 'before', 'best', 'details', 'follow', 'following', 'implement',
|
||||
'integration', 'instance', 'instances', 'later', 'make', 'now', 'primary', 'primarily', 'probably',
|
||||
'remember', 'specific', 'suite', 'tearing', 'testing', 'through', 'used', 'using', 'workflow', 'workflows',
|
||||
}
|
||||
|
||||
def __init__(self, ollama_url: str | None = None, model: str | None = None):
|
||||
self.ollama_url = (ollama_url or settings.ollama_url).rstrip('/')
|
||||
@@ -96,6 +101,8 @@ class RequestInterpreter:
|
||||
parsed = json.loads(content)
|
||||
interpreted = self._normalize_interpreted_request(parsed, normalized)
|
||||
routing = self._normalize_routing(parsed.get('routing'), interpreted, compact_context)
|
||||
if routing.get('intent') == 'continue_project' and routing.get('project_name'):
|
||||
interpreted['name'] = routing['project_name']
|
||||
naming_trace = None
|
||||
if routing.get('intent') == 'new_project':
|
||||
interpreted, routing, naming_trace = await self._refine_new_project_identity(
|
||||
@@ -265,6 +272,14 @@ class RequestInterpreter:
|
||||
matched_project = project
|
||||
break
|
||||
intent = str(routing.get('intent') or '').strip() or ('continue_project' if matched_project else 'new_project')
|
||||
if matched_project is None and intent == 'continue_project':
|
||||
recent_chat_history = context.get('recent_chat_history', [])
|
||||
recent_project_id = recent_chat_history[0].get('project_id') if recent_chat_history else None
|
||||
if recent_project_id:
|
||||
matched_project = next(
|
||||
(project for project in context.get('projects', []) if project.get('project_id') == recent_project_id),
|
||||
None,
|
||||
)
|
||||
normalized = {
|
||||
'intent': intent,
|
||||
'project_id': matched_project.get('project_id') if matched_project else project_id,
|
||||
@@ -458,6 +473,7 @@ class RequestInterpreter:
|
||||
projects = context.get('projects', [])
|
||||
last_project_id = recent_history[0].get('project_id') if recent_history else None
|
||||
last_issue = ((recent_history[0].get('related_issue') or {}).get('number') if recent_history else None)
|
||||
last_project = next((project for project in projects if project.get('project_id') == last_project_id), None) if last_project_id else None
|
||||
|
||||
matched_project = None
|
||||
for project in projects:
|
||||
@@ -471,8 +487,24 @@ class RequestInterpreter:
|
||||
break
|
||||
if matched_project is None and not explicit_new:
|
||||
follow_up_tokens = ['also', 'continue', 'for this project', 'for that project', 'work on this', 'work on that', 'fix that', 'add this']
|
||||
if any(token in lowered for token in follow_up_tokens) and last_project_id:
|
||||
matched_project = next((project for project in projects if project.get('project_id') == last_project_id), None)
|
||||
leading_follow_up = lowered.startswith(('also', 'now', 'continue', 'remember', 'then'))
|
||||
recent_overlap = 0
|
||||
if last_project is not None:
|
||||
recent_prompt_text = recent_history[0].get('prompt_text') or ''
|
||||
project_reference_text = ' '.join(
|
||||
part for part in [
|
||||
last_project.get('name') or '',
|
||||
last_project.get('description') or '',
|
||||
((last_project.get('repository') or {}).get('name') or ''),
|
||||
]
|
||||
if part
|
||||
)
|
||||
recent_overlap = len(
|
||||
self._routing_tokens(prompt_text)
|
||||
& (self._routing_tokens(recent_prompt_text) | self._routing_tokens(project_reference_text))
|
||||
)
|
||||
if last_project_id and (leading_follow_up or any(token in lowered for token in follow_up_tokens) or recent_overlap >= 2):
|
||||
matched_project = last_project
|
||||
issue_number = referenced_issue
|
||||
if issue_number is None and any(token in lowered for token in ['that issue', 'this issue', 'the issue']) and last_issue is not None:
|
||||
issue_number = last_issue
|
||||
@@ -487,6 +519,14 @@ class RequestInterpreter:
|
||||
'reasoning_summary': 'Heuristic routing from chat history and project names.',
|
||||
}
|
||||
|
||||
def _routing_tokens(self, text: str) -> set[str]:
|
||||
"""Extract meaningful tokens for heuristic continuation matching."""
|
||||
cleaned = re.sub(r'[^a-z0-9]+', ' ', (text or '').lower())
|
||||
return {
|
||||
token for token in cleaned.split()
|
||||
if len(token) >= 4 and token not in self.ROUTING_STOPWORDS
|
||||
}
|
||||
|
||||
def _extract_issue_number(self, prompt_text: str) -> int | None:
|
||||
match = re.search(r'(?:#|issue\s+)(\d+)', prompt_text, flags=re.IGNORECASE)
|
||||
return int(match.group(1)) if match else None
|
||||
@@ -214,6 +214,70 @@ def _render_commit_list(commits: list[dict]) -> None:
|
||||
ui.link('Open compare view', compare_url, new_tab=True)
|
||||
|
||||
|
||||
def _render_generation_diagnostics(ui_data: dict | None) -> None:
    """Render generation and git diagnostics from the latest UI snapshot.

    Reads two optional dict sections from *ui_data* — ``generation_debug``
    (file-path filtering results) and ``git`` (commit/push/PR outcome) —
    and renders each as a labeled NiceGUI column. Shows a placeholder
    label and returns early when neither section is present.

    NOTE(review): indentation/nesting below was reconstructed from a
    whitespace-mangled diff view — confirm against the original file.
    """
    # Defensively coerce each level to a dict so malformed snapshots render as empty.
    snapshot = ui_data if isinstance(ui_data, dict) else {}
    generation_debug = snapshot.get('generation_debug') if isinstance(snapshot.get('generation_debug'), dict) else {}
    git_debug = snapshot.get('git') if isinstance(snapshot.get('git'), dict) else {}

    if not generation_debug and not git_debug:
        ui.label('No generation diagnostics captured yet.').classes('factory-muted')
        return

    def _render_path_row(label: str, values: list[str]) -> None:
        # One-line "Label: a, b, c" row; styled as code when the list is empty ('none').
        text = ', '.join(values) if values else 'none'
        ui.label(f'{label}: {text}').classes('factory-muted' if values else 'factory-code')

    with ui.column().classes('gap-3 w-full'):
        if generation_debug:
            # Section 1: which generated paths were kept vs. filtered out.
            with ui.column().classes('gap-1'):
                ui.label('Generation filtering').style('font-weight: 700; color: #2f241d;')
                ui.label(
                    'Existing workspace: '
                    + ('yes' if generation_debug.get('existing_workspace') else 'no')
                ).classes('factory-muted')
                _render_path_row('Raw paths', generation_debug.get('raw_paths') or [])
                _render_path_row('Accepted paths', generation_debug.get('accepted_paths') or [])
                _render_path_row('Rejected paths', generation_debug.get('rejected_paths') or [])
        if git_debug:
            # Section 2: commit/push outcome, staged files, and errors.
            with ui.column().classes('gap-1'):
                ui.label('Git outcome').style('font-weight: 700; color: #2f241d;')
                if git_debug.get('commit_status'):
                    with ui.row().classes('items-center gap-2'):
                        ui.label(git_debug['commit_status']).classes('factory-chip')
                        if git_debug.get('early_exit_reason'):
                            ui.label(git_debug['early_exit_reason']).classes('factory-chip')
                # `is not None` distinguishes "no data captured" from an empty list.
                if git_debug.get('candidate_files') is not None:
                    _render_path_row('Candidate files', git_debug.get('candidate_files') or [])
                latest_commit = git_debug.get('latest_commit') if isinstance(git_debug.get('latest_commit'), dict) else {}
                if latest_commit:
                    # Short (12-char) hash plus push scope (remote name or 'local').
                    ui.label(
                        f"Latest commit: {(latest_commit.get('hash') or 'unknown')[:12]} · {latest_commit.get('scope') or 'local'}"
                    ).classes('factory-muted')
                if git_debug.get('status_after_add'):
                    with ui.expansion('Git status after staging').classes('w-full q-mt-sm'):
                        ui.label(str(git_debug['status_after_add'])).classes('factory-code')
                if git_debug.get('remote_error'):
                    ui.label(f"Remote push error: {git_debug['remote_error']}").classes('factory-code')
                if git_debug.get('error'):
                    ui.label(f"Git error: {git_debug['error']}").classes('factory-code')
                # Optional sub-section: pull-request creation request/response trace.
                pull_request_debug = git_debug.get('pull_request_debug') if isinstance(git_debug.get('pull_request_debug'), dict) else {}
                if pull_request_debug:
                    ui.label('Pull request creation').style('font-weight: 700; color: #2f241d;')
                    if pull_request_debug.get('status'):
                        ui.label(str(pull_request_debug['status'])).classes('factory-chip')
                    if pull_request_debug.get('request'):
                        with ui.expansion('PR request payload').classes('w-full q-mt-sm'):
                            ui.label(json.dumps(pull_request_debug['request'], indent=2, sort_keys=True)).classes('factory-code')
                    if pull_request_debug.get('response'):
                        with ui.expansion('PR API response').classes('w-full q-mt-sm'):
                            ui.label(json.dumps(pull_request_debug['response'], indent=2, sort_keys=True)).classes('factory-code')
                    if pull_request_debug.get('resolved'):
                        resolved = pull_request_debug['resolved']
                        if resolved.get('pr_url'):
                            ui.link('Open pull request', resolved['pr_url'], new_tab=True).classes('factory-code')
|
||||
|
||||
|
||||
def _render_timeline(events: list[dict]) -> None:
|
||||
"""Render a mixed project timeline."""
|
||||
if not events:
|
||||
@@ -1576,6 +1640,9 @@ def create_dashboard():
|
||||
'Sync Repo Activity',
|
||||
on_click=lambda _=None, project_id=project['project_id']: sync_project_repository_action(project_id),
|
||||
).props('outline color=secondary').classes('q-mt-md')
|
||||
with ui.card().classes('q-pa-md'):
|
||||
ui.label('Generation Diagnostics').style('font-weight: 700; color: #3a281a;')
|
||||
_render_generation_diagnostics(project_bundle.get('ui_data'))
|
||||
|
||||
@ui.refreshable
|
||||
def render_archived_panel() -> None:
|
||||
@@ -1642,6 +1709,9 @@ def create_dashboard():
|
||||
with ui.card().classes('q-pa-md'):
|
||||
ui.label('Repository').style('font-weight: 700; color: #3a281a;')
|
||||
_render_repository_block(project_bundle.get('repository') or project.get('repository'))
|
||||
with ui.card().classes('q-pa-md'):
|
||||
ui.label('Generation Diagnostics').style('font-weight: 700; color: #3a281a;')
|
||||
_render_generation_diagnostics(project_bundle.get('ui_data'))
|
||||
with ui.card().classes('q-pa-md'):
|
||||
ui.label('Prompt').style('font-weight: 700; color: #3a281a;')
|
||||
prompts = project_bundle.get('prompts', [])
|
||||
|
||||
@@ -6,7 +6,7 @@ from urllib.parse import urlparse
|
||||
|
||||
from alembic import command
|
||||
from alembic.config import Config
|
||||
from sqlalchemy import create_engine, event, text
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
@@ -64,20 +64,6 @@ def get_engine() -> Engine:
|
||||
pool_timeout=settings.DB_POOL_TIMEOUT or 30
|
||||
)
|
||||
|
||||
# Event listener for connection checkout (PostgreSQL only)
|
||||
if not settings.use_sqlite:
|
||||
@event.listens_for(engine, "checkout")
|
||||
def receive_checkout(dbapi_connection, connection_record, connection_proxy):
|
||||
"""Log connection checkout for audit purposes."""
|
||||
if settings.LOG_LEVEL in ("DEBUG", "INFO"):
|
||||
print(f"DB Connection checked out from pool")
|
||||
|
||||
@event.listens_for(engine, "checkin")
|
||||
def receive_checkin(dbapi_connection, connection_record):
|
||||
"""Log connection checkin for audit purposes."""
|
||||
if settings.LOG_LEVEL == "DEBUG":
|
||||
print(f"DB Connection returned to pool")
|
||||
|
||||
return engine
|
||||
|
||||
|
||||
|
||||
@@ -241,6 +241,17 @@ def _serialize_project_log(log: ProjectLog) -> dict:
|
||||
}
|
||||
|
||||
|
||||
def _ensure_summary_mentions_pull_request(summary_message: str, pull_request: dict | None) -> str:
|
||||
"""Append the pull request URL to chat summaries when one exists."""
|
||||
if not isinstance(pull_request, dict):
|
||||
return summary_message
|
||||
pr_url = (pull_request.get('pr_url') or '').strip()
|
||||
if not pr_url or pr_url in summary_message:
|
||||
return summary_message
|
||||
separator = '' if summary_message.endswith(('.', '!', '?')) else '.'
|
||||
return f"{summary_message}{separator} Review PR: {pr_url}"
|
||||
|
||||
|
||||
def _serialize_system_log(log: SystemLog) -> dict:
|
||||
"""Serialize a system log row."""
|
||||
return {
|
||||
@@ -306,7 +317,7 @@ async def _run_generation(
|
||||
resolved_prompt_text = prompt_text or _compose_prompt_text(request)
|
||||
if preferred_project_id and reusable_history is not None:
|
||||
project_id = reusable_history.project_id
|
||||
elif reusable_history and not is_explicit_new_project and manager.get_open_pull_request(project_id=reusable_history.project_id):
|
||||
elif reusable_history and not is_explicit_new_project:
|
||||
project_id = reusable_history.project_id
|
||||
else:
|
||||
if is_explicit_new_project or prompt_text:
|
||||
@@ -348,6 +359,8 @@ async def _run_generation(
|
||||
response_data = _serialize_project(history)
|
||||
response_data['logs'] = [_serialize_project_log(log) for log in project_logs]
|
||||
response_data['ui_data'] = result.get('ui_data')
|
||||
response_data['generation_debug'] = ((result.get('ui_data') or {}).get('generation_debug'))
|
||||
response_data['git_debug'] = ((result.get('ui_data') or {}).get('git'))
|
||||
response_data['features'] = request.features
|
||||
response_data['tech_stack'] = request.tech_stack
|
||||
response_data['project_root'] = result.get('project_root', str(_project_root(project_id)))
|
||||
@@ -389,6 +402,7 @@ async def _run_generation(
|
||||
'logs': [log.get('message', '') for log in response_data.get('logs', []) if isinstance(log, dict)],
|
||||
}
|
||||
summary_message, summary_trace = await ChangeSummaryGenerator().summarize_with_trace(summary_context)
|
||||
summary_message = _ensure_summary_mentions_pull_request(summary_message, response_data.get('pull_request'))
|
||||
if orchestrator.db_manager and orchestrator.history and orchestrator.prompt_audit:
|
||||
orchestrator.db_manager.log_llm_trace(
|
||||
project_id=project_id,
|
||||
|
||||
Reference in New Issue
Block a user