8 Commits

Author SHA1 Message Date
d53f3fe207 release: version 0.9.10 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 10s
Upload Python Package / deploy (push) Successful in 31s
2026-04-11 18:05:25 +02:00
4f1d757dd8 fix: more git integration fixes, refs NOISSUE 2026-04-11 18:05:20 +02:00
ac75cc2e3a release: version 0.9.9 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 14s
Upload Python Package / deploy (push) Successful in 2m17s
2026-04-11 17:41:29 +02:00
f7f00d4e14 fix: add missing git binary, refs NOISSUE 2026-04-11 17:41:24 +02:00
1c539d5f60 release: version 0.9.8 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 12s
Upload Python Package / deploy (push) Successful in 29s
2026-04-11 16:32:23 +02:00
64fcd2967c fix: more file change fixes, refs NOISSUE 2026-04-11 16:32:19 +02:00
4d050ff527 release: version 0.9.7 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 22s
Upload Python Package / deploy (push) Successful in 1m19s
2026-04-11 14:33:47 +02:00
1944e2a9cf fix: more file generation improvements, refs NOISSUE 2026-04-11 14:33:45 +02:00
9 changed files with 395 additions and 34 deletions

View File

@@ -12,7 +12,10 @@ WORKDIR /app
# Install system dependencies # Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \ RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
curl \ curl \
git \
&& update-ca-certificates \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Install dependencies # Install dependencies

View File

@@ -5,10 +5,54 @@ Changelog
(unreleased) (unreleased)
------------ ------------
Fix
~~~
- More git integration fixes, refs NOISSUE. [Simon Diesenreiter]
0.9.9 (2026-04-11)
------------------
Fix
~~~
- Add missing git binary, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.8 (2026-04-11)
------------------
Fix
~~~
- More file change fixes, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.7 (2026-04-11)
------------------
Fix
~~~
- More file generation improvements, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.6 (2026-04-11)
------------------
Fix Fix
~~~ ~~~
- Repo onboarding fix, refs NOISSUE. [Simon Diesenreiter] - Repo onboarding fix, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.5 (2026-04-11) 0.9.5 (2026-04-11)
------------------ ------------------

View File

@@ -1 +1 @@
0.9.6 0.9.10

View File

@@ -2272,6 +2272,7 @@ class DatabaseManager:
"timeline": [], "timeline": [],
"issues": [], "issues": [],
"issue_work": [], "issue_work": [],
"ui_data": {},
} }
# Get logs # Get logs
@@ -2296,6 +2297,7 @@ class DatabaseManager:
llm_traces = self.get_llm_traces(project_id=project_id) llm_traces = self.get_llm_traces(project_id=project_id)
correlations = self.get_prompt_change_correlations(project_id=project_id) correlations = self.get_prompt_change_correlations(project_id=project_id)
code_changes, local_only_code_changes, orphan_code_changes = self._partition_code_changes(raw_code_changes, commits) code_changes, local_only_code_changes, orphan_code_changes = self._partition_code_changes(raw_code_changes, commits)
ui_data = self._get_latest_ui_snapshot_data(history.id)
repository = self._get_project_repository(history) repository = self._get_project_repository(history)
timeline = self.get_project_timeline(project_id=project_id) timeline = self.get_project_timeline(project_id=project_id)
repository_sync = self.get_repository_sync_status(project_id=project_id) repository_sync = self.get_repository_sync_status(project_id=project_id)
@@ -2395,6 +2397,7 @@ class DatabaseManager:
"repository_sync": repository_sync, "repository_sync": repository_sync,
"issues": issues, "issues": issues,
"issue_work": issue_work, "issue_work": issue_work,
"ui_data": ui_data,
} }
def get_prompt_events(self, project_id: str | None = None, limit: int = 100) -> list[dict]: def get_prompt_events(self, project_id: str | None = None, limit: int = 100) -> list[dict]:

View File

@@ -58,6 +58,18 @@ class GiteaAPI:
"""Build a Gitea API URL from a relative path.""" """Build a Gitea API URL from a relative path."""
return f"{self.base_url}/api/v1/{path.lstrip('/')}" return f"{self.base_url}/api/v1/{path.lstrip('/')}"
def _normalize_pull_request_head(self, head: str | None, owner: str | None = None) -> str | None:
"""Return a Gitea-compatible head ref for pull request creation."""
normalized = (head or '').strip()
if not normalized:
return None
if ':' in normalized:
return normalized
effective_owner = (owner or self.owner or '').strip()
if not effective_owner:
return normalized
return f"{effective_owner}:{normalized}"
def build_repo_git_url(self, owner: str | None = None, repo: str | None = None) -> str | None: def build_repo_git_url(self, owner: str | None = None, repo: str | None = None) -> str | None:
"""Build the clone URL for a repository.""" """Build the clone URL for a repository."""
_owner = owner or self.owner _owner = owner or self.owner
@@ -222,11 +234,12 @@ class GiteaAPI:
"""Create a pull request.""" """Create a pull request."""
_owner = owner or self.owner _owner = owner or self.owner
_repo = repo or self.repo _repo = repo or self.repo
normalized_head = self._normalize_pull_request_head(head, _owner)
payload = { payload = {
"title": title, "title": title,
"body": body, "body": body,
"base": base, "base": base,
"head": head or f"{_owner}-{_repo}-ai-gen-{hash(title) % 10000}", "head": normalized_head or f"{_owner}:{_owner}-{_repo}-ai-gen-{hash(title) % 10000}",
} }
return await self._request("POST", f"repos/{_owner}/{_repo}/pulls", payload) return await self._request("POST", f"repos/{_owner}/{_repo}/pulls", payload)
@@ -242,11 +255,12 @@ class GiteaAPI:
"""Synchronously create a pull request.""" """Synchronously create a pull request."""
_owner = owner or self.owner _owner = owner or self.owner
_repo = repo or self.repo _repo = repo or self.repo
normalized_head = self._normalize_pull_request_head(head, _owner)
payload = { payload = {
"title": title, "title": title,
"body": body, "body": body,
"base": base, "base": base,
"head": head or f"{_owner}-{_repo}-ai-gen-{hash(title) % 10000}", "head": normalized_head or f"{_owner}:{_owner}-{_repo}-ai-gen-{hash(title) % 10000}",
} }
return self._request_sync("POST", f"repos/{_owner}/{_repo}/pulls", payload) return self._request_sync("POST", f"repos/{_owner}/{_repo}/pulls", payload)

View File

@@ -7,6 +7,7 @@ import json
import py_compile import py_compile
import re import re
import subprocess import subprocess
from pathlib import PurePosixPath
from typing import Optional from typing import Optional
from datetime import datetime from datetime import datetime
@@ -31,6 +32,10 @@ class AgentOrchestrator:
REMOTE_READY_REPOSITORY_MODES = {'project', 'onboarded'} REMOTE_READY_REPOSITORY_MODES = {'project', 'onboarded'}
REMOTE_READY_REPOSITORY_STATUSES = {'created', 'exists', 'ready', 'onboarded'} REMOTE_READY_REPOSITORY_STATUSES = {'created', 'exists', 'ready', 'onboarded'}
GENERATED_TEXT_FILE_SUFFIXES = {'.py', '.md', '.txt', '.toml', '.yaml', '.yml', '.json', '.ini', '.cfg', '.sh', '.html', '.css', '.js', '.ts'}
GENERATED_TEXT_FILE_NAMES = {'README', 'README.md', '.gitignore', 'requirements.txt', 'pyproject.toml', 'Dockerfile', 'Containerfile', 'Makefile'}
MAX_WORKSPACE_CONTEXT_FILES = 20
MAX_WORKSPACE_CONTEXT_CHARS = 24000
def __init__( def __init__(
self, self,
@@ -240,6 +245,59 @@ class AgentOrchestrator:
fallback_used=False, fallback_used=False,
) )
def _is_safe_relative_path(self, path: str) -> bool:
"""Return whether a generated file path is safe to write under the project root."""
normalized = str(PurePosixPath((path or '').strip()))
if not normalized or normalized in {'.', '..'}:
return False
if normalized.startswith('/') or normalized.startswith('../') or '/../' in normalized:
return False
if normalized.startswith('.git/'):
return False
return True
def _is_supported_generated_text_file(self, path: str) -> bool:
"""Return whether the generated path is a supported text artifact."""
normalized = PurePosixPath(path)
if normalized.name in self.GENERATED_TEXT_FILE_NAMES:
return True
return normalized.suffix.lower() in self.GENERATED_TEXT_FILE_SUFFIXES
def _collect_workspace_context(self) -> dict:
"""Collect a compact, text-only snapshot of the current project workspace."""
if not self.project_root.exists():
return {'has_existing_files': False, 'files': []}
files: list[dict] = []
total_chars = 0
for path in sorted(self.project_root.rglob('*')):
if not path.is_file():
continue
relative_path = path.relative_to(self.project_root).as_posix()
if relative_path == '.gitignore':
continue
if not self._is_safe_relative_path(relative_path) or not self._is_supported_generated_text_file(relative_path):
continue
try:
content = path.read_text(encoding='utf-8')
except (UnicodeDecodeError, OSError):
continue
remaining_chars = self.MAX_WORKSPACE_CONTEXT_CHARS - total_chars
if remaining_chars <= 0:
break
snippet = content[:remaining_chars]
files.append(
{
'path': relative_path,
'content': snippet,
'truncated': len(snippet) < len(content),
}
)
total_chars += len(snippet)
if len(files) >= self.MAX_WORKSPACE_CONTEXT_FILES:
break
return {'has_existing_files': bool(files), 'files': files}
def _parse_generated_files(self, content: str | None) -> dict[str, str]: def _parse_generated_files(self, content: str | None) -> dict[str, str]:
"""Parse an LLM file bundle response into relative-path/content pairs.""" """Parse an LLM file bundle response into relative-path/content pairs."""
if not content: if not content:
@@ -248,7 +306,6 @@ class AgentOrchestrator:
parsed = json.loads(content) parsed = json.loads(content)
except Exception: except Exception:
return {} return {}
allowed_paths = set(self._fallback_generated_files().keys())
generated: dict[str, str] = {} generated: dict[str, str] = {}
if isinstance(parsed, dict) and isinstance(parsed.get('files'), list): if isinstance(parsed, dict) and isinstance(parsed.get('files'), list):
for item in parsed['files']: for item in parsed['files']:
@@ -256,17 +313,50 @@ class AgentOrchestrator:
continue continue
path = str(item.get('path') or '').strip() path = str(item.get('path') or '').strip()
file_content = item.get('content') file_content = item.get('content')
if path in allowed_paths and isinstance(file_content, str) and file_content.strip(): if (
self._is_safe_relative_path(path)
and self._is_supported_generated_text_file(path)
and isinstance(file_content, str)
and file_content.strip()
):
generated[path] = file_content.rstrip() + "\n" generated[path] = file_content.rstrip() + "\n"
elif isinstance(parsed, dict): elif isinstance(parsed, dict):
for path, file_content in parsed.items(): for path, file_content in parsed.items():
if path in allowed_paths and isinstance(file_content, str) and file_content.strip(): normalized_path = str(path).strip()
generated[str(path)] = file_content.rstrip() + "\n" if (
self._is_safe_relative_path(normalized_path)
and self._is_supported_generated_text_file(normalized_path)
and isinstance(file_content, str)
and file_content.strip()
):
generated[normalized_path] = file_content.rstrip() + "\n"
return generated return generated
async def _generate_prompt_driven_files(self) -> tuple[dict[str, str], dict | None]: async def _generate_prompt_driven_files(self) -> tuple[dict[str, str], dict | None, bool]:
"""Use the configured LLM to generate prompt-specific project files.""" """Use the configured LLM to generate prompt-specific project files."""
fallback_files = self._fallback_generated_files() fallback_files = self._fallback_generated_files()
workspace_context = self._collect_workspace_context()
has_existing_files = bool(workspace_context.get('has_existing_files'))
if has_existing_files:
system_prompt = (
'You modify an existing software repository. '
'Return only JSON. Update the smallest necessary set of files to satisfy the new prompt. '
'Prefer editing existing files over inventing a new starter app. '
'Only return files that should be written. Omit unchanged files. '
'Use repository-relative paths and do not wrap the JSON in markdown fences.'
)
user_prompt = (
f"Project name: {self.project_name}\n"
f"Description: {self.description}\n"
f"Original prompt: {self.prompt_text or self.description}\n"
f"Requested features: {json.dumps(self.features)}\n"
f"Preferred tech stack: {json.dumps(self.tech_stack)}\n"
f"Related issue: {json.dumps(self.related_issue) if self.related_issue else 'null'}\n\n"
f"Current workspace snapshot:\n{json.dumps(workspace_context['files'], indent=2)}\n\n"
'Return JSON shaped as {"files": [{"path": "relative/path.py", "content": "..."}, ...]}. '
'Each file path must be relative to the repository root.'
)
else:
system_prompt = ( system_prompt = (
'You generate small but concrete starter projects. ' 'You generate small but concrete starter projects. '
'Return only JSON. Provide production-like but compact code that directly reflects the user request. ' 'Return only JSON. Provide production-like but compact code that directly reflects the user request. '
@@ -282,7 +372,8 @@ class AgentOrchestrator:
f"Requested features: {json.dumps(self.features)}\n" f"Requested features: {json.dumps(self.features)}\n"
f"Preferred tech stack: {json.dumps(self.tech_stack)}\n" f"Preferred tech stack: {json.dumps(self.tech_stack)}\n"
f"Related issue: {json.dumps(self.related_issue) if self.related_issue else 'null'}\n\n" f"Related issue: {json.dumps(self.related_issue) if self.related_issue else 'null'}\n\n"
"Return JSON shaped as {\"files\": [{\"path\": \"README.md\", \"content\": \"...\"}, ...]}." 'Return JSON shaped as {"files": [{"path": "README.md", "content": "..."}, ...]}. '
'At minimum include README.md, requirements.txt, main.py, and tests/test_app.py.'
) )
content, trace = await LLMServiceClient().chat_with_trace( content, trace = await LLMServiceClient().chat_with_trace(
stage='generation_plan', stage='generation_plan',
@@ -293,12 +384,35 @@ class AgentOrchestrator:
'project_name': self.project_name, 'project_name': self.project_name,
'repository': self.ui_manager.ui_data.get('repository'), 'repository': self.ui_manager.ui_data.get('repository'),
'related_issue': self.related_issue, 'related_issue': self.related_issue,
'workspace_files': workspace_context.get('files', []),
}, },
expect_json=True, expect_json=True,
) )
raw_generated_paths = self._extract_raw_generated_paths(content)
generated_files = self._parse_generated_files(content) generated_files = self._parse_generated_files(content)
accepted_paths = list(generated_files.keys())
rejected_paths = [path for path in raw_generated_paths if path not in accepted_paths]
generation_debug = {
'raw_paths': raw_generated_paths,
'accepted_paths': accepted_paths,
'rejected_paths': rejected_paths,
'existing_workspace': has_existing_files,
}
self.ui_manager.ui_data['generation_debug'] = generation_debug
self._append_log(
'LLM returned file candidates: '
f"raw={raw_generated_paths or []}; accepted={accepted_paths or []}; rejected={rejected_paths or []}."
)
self._log_system_debug(
'generation',
'LLM file candidates '
f"raw={raw_generated_paths or []}; accepted={accepted_paths or []}; rejected={rejected_paths or []}; "
f"existing_workspace={has_existing_files}",
)
if has_existing_files:
return generated_files, trace, True
merged_files = {**fallback_files, **generated_files} merged_files = {**fallback_files, **generated_files}
return merged_files, trace return merged_files, trace, False
async def _sync_issue_context(self) -> None: async def _sync_issue_context(self) -> None:
"""Sync repository issues and resolve a linked issue from the prompt when present.""" """Sync repository issues and resolve a linked issue from the prompt when present."""
@@ -479,6 +593,16 @@ class AgentOrchestrator:
f"Prompt: {self.prompt_text or self.description}\n\n" f"Prompt: {self.prompt_text or self.description}\n\n"
f"Branch: {self.branch_name}" f"Branch: {self.branch_name}"
) )
pull_request_debug = self.ui_manager.ui_data.setdefault('git', {}).setdefault('pull_request_debug', {})
pull_request_request = {
'owner': self.repo_owner,
'repo': self.repo_name,
'title': title,
'body': body,
'base': 'main',
'head': self.gitea_api._normalize_pull_request_head(self.branch_name, self.repo_owner) or self.branch_name,
}
pull_request_debug['request'] = pull_request_request
result = await self.gitea_api.create_pull_request( result = await self.gitea_api.create_pull_request(
title=title, title=title,
body=body, body=body,
@@ -487,7 +611,9 @@ class AgentOrchestrator:
base='main', base='main',
head=self.branch_name, head=self.branch_name,
) )
pull_request_debug['response'] = result
if result.get('error'): if result.get('error'):
pull_request_debug['status'] = 'error'
raise RuntimeError(f"Unable to create pull request: {result.get('error')}") raise RuntimeError(f"Unable to create pull request: {result.get('error')}")
pr_number = result.get('number') or result.get('id') or 0 pr_number = result.get('number') or result.get('id') or 0
@@ -502,6 +628,8 @@ class AgentOrchestrator:
'merged': bool(result.get('merged')), 'merged': bool(result.get('merged')),
'pr_state': result.get('state', 'open'), 'pr_state': result.get('state', 'open'),
} }
pull_request_debug['status'] = 'created'
pull_request_debug['resolved'] = pr_data
if self.db_manager and self.history: if self.db_manager and self.history:
self.db_manager.save_pr_data(self.history.id, pr_data) self.db_manager.save_pr_data(self.history.id, pr_data)
self.active_pull_request = self.db_manager.get_open_pull_request(project_id=self.project_id) if self.db_manager else pr_data self.active_pull_request = self.db_manager.get_open_pull_request(project_id=self.project_id) if self.db_manager else pr_data
@@ -553,6 +681,35 @@ class AgentOrchestrator:
if self.db_manager and self.history: if self.db_manager and self.history:
self.db_manager._log_action(self.history.id, "INFO", message) self.db_manager._log_action(self.history.id, "INFO", message)
def _log_system_debug(self, component: str, message: str, level: str = 'INFO') -> None:
"""Persist a system-level debug breadcrumb for generation and git decisions."""
if not self.db_manager:
return
self.db_manager.log_system_event(component=component, level=level, message=f"{self.project_id}: {message}")
def _extract_raw_generated_paths(self, content: str | None) -> list[str]:
"""Return all file paths proposed by the LLM response before safety filtering."""
if not content:
return []
try:
parsed = json.loads(content)
except Exception:
return []
raw_paths: list[str] = []
if isinstance(parsed, dict) and isinstance(parsed.get('files'), list):
for item in parsed['files']:
if not isinstance(item, dict):
continue
path = str(item.get('path') or '').strip()
if path:
raw_paths.append(path)
elif isinstance(parsed, dict):
for path in parsed.keys():
normalized_path = str(path).strip()
if normalized_path:
raw_paths.append(normalized_path)
return raw_paths
def _update_progress(self, progress: int, step: str, message: str) -> None: def _update_progress(self, progress: int, step: str, message: str) -> None:
self.progress = progress self.progress = progress
self.current_step = step self.current_step = step
@@ -571,6 +728,8 @@ class AgentOrchestrator:
target.parent.mkdir(parents=True, exist_ok=True) target.parent.mkdir(parents=True, exist_ok=True)
change_type = "UPDATE" if target.exists() else "CREATE" change_type = "UPDATE" if target.exists() else "CREATE"
previous_content = target.read_text(encoding="utf-8") if target.exists() else "" previous_content = target.read_text(encoding="utf-8") if target.exists() else ""
if previous_content == content:
return
diff_text = self._build_diff_text(relative_path, previous_content, content) diff_text = self._build_diff_text(relative_path, previous_content, content)
target.write_text(content, encoding="utf-8") target.write_text(content, encoding="utf-8")
self.changed_files.append(relative_path) self.changed_files.append(relative_path)
@@ -679,9 +838,12 @@ class AgentOrchestrator:
async def _generate_code(self) -> None: async def _generate_code(self) -> None:
"""Generate code using Ollama.""" """Generate code using Ollama."""
generated_files, trace = await self._generate_prompt_driven_files() change_count_before = len(self.pending_code_changes)
generated_files, trace, editing_existing_workspace = await self._generate_prompt_driven_files()
for relative_path, content in generated_files.items(): for relative_path, content in generated_files.items():
self._write_file(relative_path, content) self._write_file(relative_path, content)
if editing_existing_workspace and len(self.pending_code_changes) == change_count_before:
raise RuntimeError('The LLM response did not produce any file changes for the existing project.')
fallback_used = bool(trace and trace.get('fallback_used')) or trace is None fallback_used = bool(trace and trace.get('fallback_used')) or trace is None
if self.db_manager and self.history and self.prompt_audit and trace: if self.db_manager and self.history and self.prompt_audit and trace:
self.db_manager.log_llm_trace( self.db_manager.log_llm_trace(
@@ -711,11 +873,25 @@ class AgentOrchestrator:
async def _commit_to_git(self) -> None: async def _commit_to_git(self) -> None:
"""Commit changes to git.""" """Commit changes to git."""
unique_files = list(dict.fromkeys(self.changed_files)) unique_files = list(dict.fromkeys(self.changed_files))
git_debug = self.ui_manager.ui_data.setdefault('git', {})
if not unique_files: if not unique_files:
git_debug.update({
'commit_status': 'skipped',
'early_exit_reason': 'changed_files_empty',
'candidate_files': [],
})
self._append_log('Git commit skipped: no generated files were marked as changed.')
self._log_system_debug('git', 'Commit exited early because changed_files was empty.')
return return
if not self.git_manager.is_git_available(): if not self.git_manager.is_git_available():
self.ui_manager.ui_data.setdefault('git', {})['error'] = 'git executable is not available in PATH' git_debug.update({
'commit_status': 'error',
'early_exit_reason': 'git_unavailable',
'candidate_files': unique_files,
'error': 'git executable is not available in PATH',
})
self._append_log('Git commit skipped: git executable is not available in PATH') self._append_log('Git commit skipped: git executable is not available in PATH')
self._log_system_debug('git', 'Commit exited early because git is unavailable.', level='ERROR')
return return
try: try:
@@ -723,7 +899,23 @@ class AgentOrchestrator:
self.git_manager.init_repo() self.git_manager.init_repo()
base_commit = self.git_manager.current_head_or_none() base_commit = self.git_manager.current_head_or_none()
self.git_manager.add_files(unique_files) self.git_manager.add_files(unique_files)
if not self.git_manager.get_status(): status_after_add = self.git_manager.get_status()
if not status_after_add:
git_debug.update({
'commit_status': 'skipped',
'early_exit_reason': 'clean_after_staging',
'candidate_files': unique_files,
'status_after_add': '',
})
self._append_log(
'Git commit skipped: working tree was clean after staging candidate files '
f'{unique_files}. No repository diff was created.'
)
self._log_system_debug(
'git',
'Commit exited early because git status was clean after staging '
f'files={unique_files}',
)
return return
commit_message = f"AI generation for prompt: {self.project_name}" commit_message = f"AI generation for prompt: {self.project_name}"
@@ -736,11 +928,17 @@ class AgentOrchestrator:
"scope": "local", "scope": "local",
"branch": self.branch_name, "branch": self.branch_name,
} }
git_debug.update({
'commit_status': 'committed',
'early_exit_reason': None,
'candidate_files': unique_files,
'status_after_add': status_after_add,
})
remote_record = None remote_record = None
try: try:
remote_record = await self._push_remote_commit(commit_hash, commit_message, unique_files, base_commit) remote_record = await self._push_remote_commit(commit_hash, commit_message, unique_files, base_commit)
except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as remote_exc: except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as remote_exc:
self.ui_manager.ui_data.setdefault("git", {})["remote_error"] = str(remote_exc) git_debug["remote_error"] = str(remote_exc)
self._append_log(f"Remote git push skipped: {remote_exc}") self._append_log(f"Remote git push skipped: {remote_exc}")
if remote_record: if remote_record:
@@ -750,8 +948,8 @@ class AgentOrchestrator:
if remote_record.get('pull_request'): if remote_record.get('pull_request'):
commit_record['pull_request'] = remote_record['pull_request'] commit_record['pull_request'] = remote_record['pull_request']
self.ui_manager.ui_data['pull_request'] = remote_record['pull_request'] self.ui_manager.ui_data['pull_request'] = remote_record['pull_request']
self.ui_manager.ui_data.setdefault("git", {})["latest_commit"] = commit_record git_debug["latest_commit"] = commit_record
self.ui_manager.ui_data.setdefault("git", {})["commits"] = [commit_record] git_debug["commits"] = [commit_record]
self._append_log(f"Recorded git commit {commit_hash[:12]} for generated files.") self._append_log(f"Recorded git commit {commit_hash[:12]} for generated files.")
if self.db_manager: if self.db_manager:
self.db_manager.log_commit( self.db_manager.log_commit(
@@ -797,7 +995,12 @@ class AgentOrchestrator:
commit_url=remote_record.get('commit_url') if remote_record else None, commit_url=remote_record.get('commit_url') if remote_record else None,
) )
except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as exc: except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as exc:
self.ui_manager.ui_data.setdefault("git", {})["error"] = str(exc) git_debug.update({
'commit_status': 'error',
'early_exit_reason': 'commit_exception',
'candidate_files': unique_files,
'error': str(exc),
})
self._append_log(f"Git commit skipped: {exc}") self._append_log(f"Git commit skipped: {exc}")
async def _create_pr(self) -> None: async def _create_pr(self) -> None:

View File

@@ -96,6 +96,8 @@ class RequestInterpreter:
parsed = json.loads(content) parsed = json.loads(content)
interpreted = self._normalize_interpreted_request(parsed, normalized) interpreted = self._normalize_interpreted_request(parsed, normalized)
routing = self._normalize_routing(parsed.get('routing'), interpreted, compact_context) routing = self._normalize_routing(parsed.get('routing'), interpreted, compact_context)
if routing.get('intent') == 'continue_project' and routing.get('project_name'):
interpreted['name'] = routing['project_name']
naming_trace = None naming_trace = None
if routing.get('intent') == 'new_project': if routing.get('intent') == 'new_project':
interpreted, routing, naming_trace = await self._refine_new_project_identity( interpreted, routing, naming_trace = await self._refine_new_project_identity(
@@ -265,6 +267,14 @@ class RequestInterpreter:
matched_project = project matched_project = project
break break
intent = str(routing.get('intent') or '').strip() or ('continue_project' if matched_project else 'new_project') intent = str(routing.get('intent') or '').strip() or ('continue_project' if matched_project else 'new_project')
if matched_project is None and intent == 'continue_project':
recent_chat_history = context.get('recent_chat_history', [])
recent_project_id = recent_chat_history[0].get('project_id') if recent_chat_history else None
if recent_project_id:
matched_project = next(
(project for project in context.get('projects', []) if project.get('project_id') == recent_project_id),
None,
)
normalized = { normalized = {
'intent': intent, 'intent': intent,
'project_id': matched_project.get('project_id') if matched_project else project_id, 'project_id': matched_project.get('project_id') if matched_project else project_id,

View File

@@ -214,6 +214,70 @@ def _render_commit_list(commits: list[dict]) -> None:
ui.link('Open compare view', compare_url, new_tab=True) ui.link('Open compare view', compare_url, new_tab=True)
def _render_generation_diagnostics(ui_data: dict | None) -> None:
    """Render generation and git diagnostics from the latest UI snapshot.

    Reads the optional 'generation_debug' and 'git' dicts out of the
    snapshot and renders them as labelled sections; shows a placeholder
    when neither is present.

    NOTE(review): this body was recovered from a whitespace-stripped diff;
    the nesting below is a faithful reconstruction — confirm against the
    original file before relying on the exact layout.
    """
    # Defensive: tolerate None or non-dict snapshots from older records.
    snapshot = ui_data if isinstance(ui_data, dict) else {}
    generation_debug = snapshot.get('generation_debug') if isinstance(snapshot.get('generation_debug'), dict) else {}
    git_debug = snapshot.get('git') if isinstance(snapshot.get('git'), dict) else {}
    if not generation_debug and not git_debug:
        ui.label('No generation diagnostics captured yet.').classes('factory-muted')
        return

    def _render_path_row(label: str, values: list[str]) -> None:
        # One-line "Label: a, b, c" row; empty lists render as "none".
        text = ', '.join(values) if values else 'none'
        ui.label(f'{label}: {text}').classes('factory-muted' if values else 'factory-code')

    with ui.column().classes('gap-3 w-full'):
        if generation_debug:
            with ui.column().classes('gap-1'):
                ui.label('Generation filtering').style('font-weight: 700; color: #2f241d;')
                ui.label(
                    'Existing workspace: '
                    + ('yes' if generation_debug.get('existing_workspace') else 'no')
                ).classes('factory-muted')
                _render_path_row('Raw paths', generation_debug.get('raw_paths') or [])
                _render_path_row('Accepted paths', generation_debug.get('accepted_paths') or [])
                _render_path_row('Rejected paths', generation_debug.get('rejected_paths') or [])
        if git_debug:
            with ui.column().classes('gap-1'):
                ui.label('Git outcome').style('font-weight: 700; color: #2f241d;')
                if git_debug.get('commit_status'):
                    with ui.row().classes('items-center gap-2'):
                        ui.label(git_debug['commit_status']).classes('factory-chip')
                        if git_debug.get('early_exit_reason'):
                            ui.label(git_debug['early_exit_reason']).classes('factory-chip')
                # candidate_files of None means "never reached staging"; an
                # empty list still renders as an explicit "none" row.
                if git_debug.get('candidate_files') is not None:
                    _render_path_row('Candidate files', git_debug.get('candidate_files') or [])
                latest_commit = git_debug.get('latest_commit') if isinstance(git_debug.get('latest_commit'), dict) else {}
                if latest_commit:
                    ui.label(
                        f"Latest commit: {(latest_commit.get('hash') or 'unknown')[:12]} · {latest_commit.get('scope') or 'local'}"
                    ).classes('factory-muted')
                if git_debug.get('status_after_add'):
                    with ui.expansion('Git status after staging').classes('w-full q-mt-sm'):
                        ui.label(str(git_debug['status_after_add'])).classes('factory-code')
                if git_debug.get('remote_error'):
                    ui.label(f"Remote push error: {git_debug['remote_error']}").classes('factory-code')
                if git_debug.get('error'):
                    ui.label(f"Git error: {git_debug['error']}").classes('factory-code')
                pull_request_debug = git_debug.get('pull_request_debug') if isinstance(git_debug.get('pull_request_debug'), dict) else {}
                if pull_request_debug:
                    ui.label('Pull request creation').style('font-weight: 700; color: #2f241d;')
                    if pull_request_debug.get('status'):
                        ui.label(str(pull_request_debug['status'])).classes('factory-chip')
                    if pull_request_debug.get('request'):
                        with ui.expansion('PR request payload').classes('w-full q-mt-sm'):
                            ui.label(json.dumps(pull_request_debug['request'], indent=2, sort_keys=True)).classes('factory-code')
                    if pull_request_debug.get('response'):
                        with ui.expansion('PR API response').classes('w-full q-mt-sm'):
                            ui.label(json.dumps(pull_request_debug['response'], indent=2, sort_keys=True)).classes('factory-code')
                    if pull_request_debug.get('resolved'):
                        resolved = pull_request_debug['resolved']
                        if resolved.get('pr_url'):
                            ui.link('Open pull request', resolved['pr_url'], new_tab=True).classes('factory-code')
def _render_timeline(events: list[dict]) -> None: def _render_timeline(events: list[dict]) -> None:
"""Render a mixed project timeline.""" """Render a mixed project timeline."""
if not events: if not events:
@@ -1576,6 +1640,9 @@ def create_dashboard():
'Sync Repo Activity', 'Sync Repo Activity',
on_click=lambda _=None, project_id=project['project_id']: sync_project_repository_action(project_id), on_click=lambda _=None, project_id=project['project_id']: sync_project_repository_action(project_id),
).props('outline color=secondary').classes('q-mt-md') ).props('outline color=secondary').classes('q-mt-md')
with ui.card().classes('q-pa-md'):
ui.label('Generation Diagnostics').style('font-weight: 700; color: #3a281a;')
_render_generation_diagnostics(project_bundle.get('ui_data'))
@ui.refreshable @ui.refreshable
def render_archived_panel() -> None: def render_archived_panel() -> None:
@@ -1642,6 +1709,9 @@ def create_dashboard():
with ui.card().classes('q-pa-md'): with ui.card().classes('q-pa-md'):
ui.label('Repository').style('font-weight: 700; color: #3a281a;') ui.label('Repository').style('font-weight: 700; color: #3a281a;')
_render_repository_block(project_bundle.get('repository') or project.get('repository')) _render_repository_block(project_bundle.get('repository') or project.get('repository'))
with ui.card().classes('q-pa-md'):
ui.label('Generation Diagnostics').style('font-weight: 700; color: #3a281a;')
_render_generation_diagnostics(project_bundle.get('ui_data'))
with ui.card().classes('q-pa-md'): with ui.card().classes('q-pa-md'):
ui.label('Prompt').style('font-weight: 700; color: #3a281a;') ui.label('Prompt').style('font-weight: 700; color: #3a281a;')
prompts = project_bundle.get('prompts', []) prompts = project_bundle.get('prompts', [])

View File

@@ -241,6 +241,17 @@ def _serialize_project_log(log: ProjectLog) -> dict:
} }
def _ensure_summary_mentions_pull_request(summary_message: str, pull_request: dict | None) -> str:
"""Append the pull request URL to chat summaries when one exists."""
if not isinstance(pull_request, dict):
return summary_message
pr_url = (pull_request.get('pr_url') or '').strip()
if not pr_url or pr_url in summary_message:
return summary_message
separator = '' if summary_message.endswith(('.', '!', '?')) else '.'
return f"{summary_message}{separator} Review PR: {pr_url}"
def _serialize_system_log(log: SystemLog) -> dict: def _serialize_system_log(log: SystemLog) -> dict:
"""Serialize a system log row.""" """Serialize a system log row."""
return { return {
@@ -306,7 +317,7 @@ async def _run_generation(
resolved_prompt_text = prompt_text or _compose_prompt_text(request) resolved_prompt_text = prompt_text or _compose_prompt_text(request)
if preferred_project_id and reusable_history is not None: if preferred_project_id and reusable_history is not None:
project_id = reusable_history.project_id project_id = reusable_history.project_id
elif reusable_history and not is_explicit_new_project and manager.get_open_pull_request(project_id=reusable_history.project_id): elif reusable_history and not is_explicit_new_project:
project_id = reusable_history.project_id project_id = reusable_history.project_id
else: else:
if is_explicit_new_project or prompt_text: if is_explicit_new_project or prompt_text:
@@ -348,6 +359,8 @@ async def _run_generation(
response_data = _serialize_project(history) response_data = _serialize_project(history)
response_data['logs'] = [_serialize_project_log(log) for log in project_logs] response_data['logs'] = [_serialize_project_log(log) for log in project_logs]
response_data['ui_data'] = result.get('ui_data') response_data['ui_data'] = result.get('ui_data')
response_data['generation_debug'] = ((result.get('ui_data') or {}).get('generation_debug'))
response_data['git_debug'] = ((result.get('ui_data') or {}).get('git'))
response_data['features'] = request.features response_data['features'] = request.features
response_data['tech_stack'] = request.tech_stack response_data['tech_stack'] = request.tech_stack
response_data['project_root'] = result.get('project_root', str(_project_root(project_id))) response_data['project_root'] = result.get('project_root', str(_project_root(project_id)))
@@ -389,6 +402,7 @@ async def _run_generation(
'logs': [log.get('message', '') for log in response_data.get('logs', []) if isinstance(log, dict)], 'logs': [log.get('message', '') for log in response_data.get('logs', []) if isinstance(log, dict)],
} }
summary_message, summary_trace = await ChangeSummaryGenerator().summarize_with_trace(summary_context) summary_message, summary_trace = await ChangeSummaryGenerator().summarize_with_trace(summary_context)
summary_message = _ensure_summary_mentions_pull_request(summary_message, response_data.get('pull_request'))
if orchestrator.db_manager and orchestrator.history and orchestrator.prompt_audit: if orchestrator.db_manager and orchestrator.history and orchestrator.prompt_audit:
orchestrator.db_manager.log_llm_trace( orchestrator.db_manager.log_llm_trace(
project_id=project_id, project_id=project_id,