10 Commits
0.9.3 ... 0.9.8

Author SHA1 Message Date
1c539d5f60 release: version 0.9.8 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 12s
Upload Python Package / deploy (push) Successful in 29s
2026-04-11 16:32:23 +02:00
64fcd2967c fix: more file change fixes, refs NOISSUE 2026-04-11 16:32:19 +02:00
4d050ff527 release: version 0.9.7 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 22s
Upload Python Package / deploy (push) Successful in 1m19s
2026-04-11 14:33:47 +02:00
1944e2a9cf fix: more file generation improvements, refs NOISSUE 2026-04-11 14:33:45 +02:00
7e4066c609 release: version 0.9.6 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 15s
Upload Python Package / deploy (push) Successful in 39s
2026-04-11 13:37:52 +02:00
4eeec5d808 fix: repo onboarding fix, refs NOISSUE 2026-04-11 13:37:49 +02:00
cbbed83915 release: version 0.9.5 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 18s
Upload Python Package / deploy (push) Successful in 30s
2026-04-11 13:27:26 +02:00
1e72bc9a28 fix: better code generation, refs NOISSUE 2026-04-11 13:27:23 +02:00
b0c95323fd release: version 0.9.4 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 24s
Upload Python Package / deploy (push) Successful in 56s
2026-04-11 13:06:54 +02:00
d60e753acf fix: add commit retry, refs NOISSUE 2026-04-11 13:06:48 +02:00
10 changed files with 812 additions and 104 deletions

View File

@@ -5,10 +5,65 @@ Changelog
(unreleased)
------------
Fix
~~~
- More file change fixes, refs NOISSUE. [Simon Diesenreiter]
0.9.7 (2026-04-11)
------------------
Fix
~~~
- More file generation improvements, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.6 (2026-04-11)
------------------
Fix
~~~
- Repo onboarding fix, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.5 (2026-04-11)
------------------
Fix
~~~
- Better code generation, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.4 (2026-04-11)
------------------
Fix
~~~
- Add commit retry, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.3 (2026-04-11)
------------------
Fix
~~~
- Better home assistant integration, refs NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.2 (2026-04-11)
------------------

View File

@@ -118,6 +118,8 @@ The dashboard Health tab exposes operator controls for the prompt queue, includi
The dashboard System tab now also stores Home Assistant entity ids, queue toggles, thresholds, and batch settings in the database, so the environment only needs `HOME_ASSISTANT_URL` and `HOME_ASSISTANT_TOKEN` for that integration.
Projects that show `uncommitted`, `local_only`, or `pushed_no_pr` delivery warnings in the dashboard can now be retried in place from the UI before resorting to purging orphan audit rows.
Guardrail and system prompts are no longer environment-only in practice: the factory can persist DB-backed overrides for the editable LLM prompt set, expose them at `/llm/prompts`, and edit them from the dashboard System tab. Environment values still act as defaults and as the reset target.
## API Endpoints

View File

@@ -1 +1 @@
0.9.3 0.9.8

View File

@@ -35,6 +35,7 @@ from datetime import datetime
import json import json
import re import re
import shutil import shutil
from pathlib import Path
class DatabaseMigrations: class DatabaseMigrations:
@@ -125,20 +126,54 @@ class DatabaseManager:
return sanitized.strip('-') or 'external-project' return sanitized.strip('-') or 'external-project'
@staticmethod @staticmethod
def _partition_code_changes(raw_code_changes: list[dict], commits: list[dict]) -> tuple[list[dict], list[dict]]: def _partition_code_changes(raw_code_changes: list[dict], commits: list[dict]) -> tuple[list[dict], list[dict], list[dict]]:
"""Split code changes into visible committed rows and orphaned rows.""" """Split code changes into remotely delivered, local-only, and orphaned rows."""
committed_hashes = {commit.get('commit_hash') for commit in commits if commit.get('commit_hash')} published_hashes = {
committed_prompt_ids = {commit.get('prompt_id') for commit in commits if commit.get('prompt_id') is not None} commit.get('commit_hash')
for commit in commits
if commit.get('commit_hash') and (
commit.get('remote_status') == 'pushed'
or commit.get('imported_from_remote')
or commit.get('commit_url')
)
}
published_prompt_ids = {
commit.get('prompt_id')
for commit in commits
if commit.get('prompt_id') is not None and (
commit.get('remote_status') == 'pushed'
or commit.get('imported_from_remote')
or commit.get('commit_url')
)
}
local_commit_hashes = {commit.get('commit_hash') for commit in commits if commit.get('commit_hash')}
local_prompt_ids = {commit.get('prompt_id') for commit in commits if commit.get('prompt_id') is not None}
visible_changes: list[dict] = [] visible_changes: list[dict] = []
local_only_changes: list[dict] = []
orphaned_changes: list[dict] = [] orphaned_changes: list[dict] = []
for change in raw_code_changes: for change in raw_code_changes:
change_commit_hash = change.get('commit_hash') change_commit_hash = change.get('commit_hash')
prompt_id = change.get('prompt_id') prompt_id = change.get('prompt_id')
if (change_commit_hash and change_commit_hash in committed_hashes) or (prompt_id is not None and prompt_id in committed_prompt_ids): if (change_commit_hash and change_commit_hash in published_hashes) or (prompt_id is not None and prompt_id in published_prompt_ids):
visible_changes.append(change) visible_changes.append(change)
elif (change_commit_hash and change_commit_hash in local_commit_hashes) or (prompt_id is not None and prompt_id in local_prompt_ids):
local_only_changes.append(change)
else: else:
orphaned_changes.append(change) orphaned_changes.append(change)
return visible_changes, orphaned_changes return visible_changes, local_only_changes, orphaned_changes
@staticmethod
def _dedupe_preserve_order(values: list[str | None]) -> list[str]:
"""Return non-empty values in stable unique order."""
result: list[str] = []
seen: set[str] = set()
for value in values:
normalized = (value or '').strip()
if not normalized or normalized in seen:
continue
seen.add(normalized)
result.append(normalized)
return result
def get_project_by_repository(self, owner: str, repo_name: str, include_archived: bool = False) -> ProjectHistory | None: def get_project_by_repository(self, owner: str, repo_name: str, include_archived: bool = False) -> ProjectHistory | None:
"""Return the project currently associated with a repository.""" """Return the project currently associated with a repository."""
@@ -2260,21 +2295,35 @@ class DatabaseManager:
pull_requests = self.get_pull_requests(project_id=project_id) pull_requests = self.get_pull_requests(project_id=project_id)
llm_traces = self.get_llm_traces(project_id=project_id) llm_traces = self.get_llm_traces(project_id=project_id)
correlations = self.get_prompt_change_correlations(project_id=project_id) correlations = self.get_prompt_change_correlations(project_id=project_id)
code_changes, orphan_code_changes = self._partition_code_changes(raw_code_changes, commits) code_changes, local_only_code_changes, orphan_code_changes = self._partition_code_changes(raw_code_changes, commits)
repository = self._get_project_repository(history) repository = self._get_project_repository(history)
timeline = self.get_project_timeline(project_id=project_id) timeline = self.get_project_timeline(project_id=project_id)
repository_sync = self.get_repository_sync_status(project_id=project_id) repository_sync = self.get_repository_sync_status(project_id=project_id)
issues = self.get_repository_issues(project_id=project_id) issues = self.get_repository_issues(project_id=project_id)
issue_work = self.get_issue_work_events(project_id=project_id) issue_work = self.get_issue_work_events(project_id=project_id)
published_commits = [
commit for commit in commits
if commit.get('remote_status') == 'pushed' or commit.get('imported_from_remote') or commit.get('commit_url')
]
has_pull_request = any(pr.get('pr_state') == 'open' and not pr.get('merged') for pr in pull_requests)
if orphan_code_changes: if orphan_code_changes:
delivery_status = 'uncommitted' delivery_status = 'uncommitted'
delivery_message = ( delivery_message = (
f"{len(orphan_code_changes)} generated file change(s) were recorded without a matching git commit. " f"{len(orphan_code_changes)} generated file change(s) were recorded without a matching git commit. "
"These changes never reached a PR-backed delivery." "These changes never reached a PR-backed delivery."
) )
elif commits: elif local_only_code_changes:
delivery_status = 'local_only'
delivery_message = (
f"{len(local_only_code_changes)} generated file change(s) were committed only in the local workspace. "
"No remote repo push was recorded for this prompt yet."
)
elif published_commits and repository and repository.get('mode') == 'project' and not has_pull_request:
delivery_status = 'pushed_no_pr'
delivery_message = 'Changes were pushed to the remote repository, but no pull request is currently tracked for review.'
elif published_commits:
delivery_status = 'delivered' delivery_status = 'delivered'
delivery_message = 'Generated changes were recorded in git commits for this project.' delivery_message = 'Generated changes were published to the tracked repository and are reviewable through the recorded pull request.'
else: else:
delivery_status = 'pending' delivery_status = 'pending'
delivery_message = 'No git commit has been recorded for this project yet.' delivery_message = 'No git commit has been recorded for this project yet.'
@@ -2295,6 +2344,7 @@ class DatabaseManager:
"open_pull_requests": len([pr for pr in pull_requests if pr["pr_state"] == "open" and not pr["merged"]]), "open_pull_requests": len([pr for pr in pull_requests if pr["pr_state"] == "open" and not pr["merged"]]),
"delivery_status": delivery_status, "delivery_status": delivery_status,
"delivery_message": delivery_message, "delivery_message": delivery_message,
"local_only_code_change_count": len(local_only_code_changes),
"orphan_code_change_count": len(orphan_code_changes), "orphan_code_change_count": len(orphan_code_changes),
"completed_at": history.completed_at.isoformat() if history.completed_at else None, "completed_at": history.completed_at.isoformat() if history.completed_at else None,
"created_at": history.started_at.isoformat() if history.started_at else None "created_at": history.started_at.isoformat() if history.started_at else None
@@ -2334,6 +2384,7 @@ class DatabaseManager:
], ],
"prompts": prompts, "prompts": prompts,
"code_changes": code_changes, "code_changes": code_changes,
"local_only_code_changes": local_only_code_changes,
"orphan_code_changes": orphan_code_changes, "orphan_code_changes": orphan_code_changes,
"commits": commits, "commits": commits,
"pull_requests": pull_requests, "pull_requests": pull_requests,
@@ -2401,9 +2452,21 @@ class DatabaseManager:
"""Correlate prompts with the concrete code changes that followed them.""" """Correlate prompts with the concrete code changes that followed them."""
correlations = self._build_correlations_from_links(project_id=project_id, limit=limit) correlations = self._build_correlations_from_links(project_id=project_id, limit=limit)
if correlations: if correlations:
return [correlation for correlation in correlations if correlation.get('commits')] return [
correlation for correlation in correlations
if any(
commit.get('remote_status') == 'pushed' or commit.get('imported_from_remote') or commit.get('commit_url')
for commit in correlation.get('commits', [])
)
]
fallback = self._build_correlations_from_audit_fallback(project_id=project_id, limit=limit) fallback = self._build_correlations_from_audit_fallback(project_id=project_id, limit=limit)
return [correlation for correlation in fallback if correlation.get('commits')] return [
correlation for correlation in fallback
if any(
commit.get('remote_status') == 'pushed' or commit.get('imported_from_remote') or commit.get('commit_url')
for commit in correlation.get('commits', [])
)
]
def get_dashboard_snapshot(self, limit: int = 8) -> dict: def get_dashboard_snapshot(self, limit: int = 8) -> dict:
"""Return DB-backed dashboard data for the UI.""" """Return DB-backed dashboard data for the UI."""
@@ -2467,6 +2530,329 @@ class DatabaseManager:
}, },
} }
def _build_commit_url(self, owner: str, repo_name: str, commit_hash: str) -> str | None:
    """Return a browser-facing commit URL, or None when any component is unset."""
    # All four pieces are required; settings.gitea_url may legitimately be unset.
    if not all((settings.gitea_url, owner, repo_name, commit_hash)):
        return None
    base = str(settings.gitea_url).rstrip('/')
    return f"{base}/{owner}/{repo_name}/commit/{commit_hash}"
def _update_project_audit_rows_for_delivery(
    self,
    project_id: str,
    branch: str,
    owner: str,
    repo_name: str,
    code_change_ids: list[int],
    orphan_code_change_ids: list[int],
    published_commit_hashes: list[str],
) -> None:
    """Mark matching commit and code-change rows as remotely published.

    GIT_COMMIT audit rows whose recorded hash appears in
    *published_commit_hashes* are stamped with the branch, a 'pushed'
    remote status, and a browser commit URL. CODE_CHANGE rows listed in
    *code_change_ids* receive the same branch/status stamp; rows that were
    orphaned (listed in *orphan_code_change_ids*) additionally get a commit
    hash attached so they can be correlated with the retry commit later.

    Commits the session once at the end so the updates land atomically.
    """
    # Normalize to a unique set; only membership matters below.
    commit_hashes = set(self._dedupe_preserve_order(published_commit_hashes))
    for commit_row in self.db.query(AuditTrail).filter(
        AuditTrail.project_id == project_id,
        AuditTrail.action == 'GIT_COMMIT',
    ).all():
        metadata = self._normalize_metadata(commit_row.metadata_json)
        commit_hash = metadata.get('commit_hash')
        if not commit_hash or commit_hash not in commit_hashes:
            continue
        metadata['branch'] = branch
        metadata['remote_status'] = 'pushed'
        metadata['commit_url'] = self._build_commit_url(owner, repo_name, commit_hash)
        commit_row.metadata_json = metadata
    retry_ids = set(code_change_ids)
    orphan_ids = set(orphan_code_change_ids)
    # Any one published hash serves as the hash to attach to orphan rows.
    new_commit_hash = next(iter(commit_hashes), None)
    for change_row in self.db.query(AuditTrail).filter(
        AuditTrail.project_id == project_id,
        AuditTrail.action == 'CODE_CHANGE',
    ).all():
        if change_row.id not in retry_ids:
            continue
        metadata = self._normalize_metadata(change_row.metadata_json)
        metadata['branch'] = branch
        metadata['remote_status'] = 'pushed'
        if change_row.id in orphan_ids and new_commit_hash:
            metadata['commit_hash'] = new_commit_hash
        change_row.metadata_json = metadata
    self.db.commit()
def _find_or_create_delivery_pull_request(
    self,
    history: ProjectHistory,
    gitea_api,
    owner: str,
    repo_name: str,
    branch: str,
    prompt_text: str | None,
) -> dict:
    """Return an open PR for the project branch, creating one if necessary.

    Resolution order:
      1. An open PR already tracked in the local database.
      2. An open remote PR whose head ref matches *branch* — adopted into
         the local database via save_pr_data.
      3. A freshly created PR against 'main', also persisted locally.

    Returns a plain dict summary of the PR. Raises RuntimeError when the
    remote refuses to create one.
    """
    existing = self.get_open_pull_request(project_id=history.project_id)
    if existing is not None:
        return existing
    # Adopt a matching remote PR that the local DB does not know about yet.
    remote_prs = gitea_api.list_pull_requests_sync(owner=owner, repo=repo_name, state='open')
    if isinstance(remote_prs, list):
        for item in remote_prs:
            remote_head = ((item.get('head') or {}) if isinstance(item.get('head'), dict) else {})
            if remote_head.get('ref') != branch:
                continue
            pr = self.save_pr_data(
                history.id,
                {
                    'pr_number': item.get('number') or item.get('id') or 0,
                    'title': item.get('title') or f"AI delivery for {history.project_name}",
                    'body': item.get('body') or '',
                    'state': item.get('state', 'open'),
                    'base': ((item.get('base') or {}) if isinstance(item.get('base'), dict) else {}).get('ref', 'main'),
                    'user': ((item.get('user') or {}) if isinstance(item.get('user'), dict) else {}).get('login', 'system'),
                    'pr_url': item.get('html_url') or gitea_api.build_pull_request_url(item.get('number') or item.get('id'), owner=owner, repo=repo_name),
                    'merged': bool(item.get('merged')),
                    'head': remote_head.get('ref'),
                },
            )
            return {
                'pr_number': pr.pr_number,
                'title': pr.pr_title,
                'body': pr.pr_body,
                'pr_url': pr.pr_url,
                'pr_state': pr.pr_state,
                'merged': pr.merged,
            }
    # No PR anywhere: create one against main and persist it.
    title = f"AI delivery for {history.project_name}"
    body = (
        f"Automated software factory changes for {history.project_name}.\n\n"
        f"Prompt: {prompt_text or history.description}\n\n"
        f"Branch: {branch}"
    )
    created = gitea_api.create_pull_request_sync(
        title=title,
        body=body,
        owner=owner,
        repo=repo_name,
        base='main',
        head=branch,
    )
    if created.get('error'):
        raise RuntimeError(f"Unable to create pull request: {created.get('error')}")
    pr = self.save_pr_data(
        history.id,
        {
            'pr_number': created.get('number') or created.get('id') or 0,
            'title': created.get('title', title),
            'body': created.get('body', body),
            'state': created.get('state', 'open'),
            'base': ((created.get('base') or {}) if isinstance(created.get('base'), dict) else {}).get('ref', 'main'),
            'user': ((created.get('user') or {}) if isinstance(created.get('user'), dict) else {}).get('login', 'system'),
            'pr_url': created.get('html_url') or gitea_api.build_pull_request_url(created.get('number') or created.get('id'), owner=owner, repo=repo_name),
            'merged': bool(created.get('merged')),
            'head': branch,
        },
    )
    return {
        'pr_number': pr.pr_number,
        'title': pr.pr_title,
        'body': pr.pr_body,
        'pr_url': pr.pr_url,
        'pr_state': pr.pr_state,
        'merged': pr.merged,
    }
def retry_project_delivery(self, project_id: str) -> dict:
    """Retry remote delivery for orphaned, local-only, or missing-PR project changes.

    Re-drives the delivery pipeline for a project whose dashboard status is
    'uncommitted', 'local_only', or 'pushed_no_pr': re-commits orphaned
    generated files when needed, pushes the delivery branch to Gitea,
    ensures an open pull request exists, updates audit rows and the UI
    snapshot, and records the retry in the audit trail.

    Returns a status dict ({'status': 'success'|'error', ...}); expected
    failure modes are reported in the payload rather than raised.
    """
    history = self.get_project_by_id(project_id)
    if history is None:
        return {'status': 'error', 'message': 'Project not found'}
    audit_data = self.get_project_audit_data(project_id)
    project = audit_data.get('project') or {}
    delivery_status = project.get('delivery_status')
    # Nothing to retry when the project is already delivered or still pending.
    if delivery_status not in {'uncommitted', 'local_only', 'pushed_no_pr'}:
        return {'status': 'success', 'message': 'No failed delivery state was found for this project.', 'project_id': project_id}
    snapshot_data = self._get_latest_ui_snapshot_data(history.id)
    repository = self._get_project_repository(history) or {}
    if repository.get('mode') != 'project':
        return {'status': 'error', 'message': 'Only project-scoped repositories support delivery retry.', 'project_id': project_id}
    owner = repository.get('owner') or settings.gitea_owner
    repo_name = repository.get('name') or settings.gitea_repo
    if not owner or not repo_name or not settings.gitea_url or not settings.gitea_token:
        return {'status': 'error', 'message': 'Gitea repository settings are incomplete; cannot retry delivery.', 'project_id': project_id}
    project_root = Path(snapshot_data.get('project_root') or (settings.projects_root / project_id)).expanduser().resolve()
    if not project_root.exists():
        return {'status': 'error', 'message': f'Project workspace does not exist at {project_root}', 'project_id': project_id}
    # Imported lazily; the package/script dual import mirrors the file header style.
    try:
        from .git_manager import GitManager
        from .gitea import GiteaAPI
    except ImportError:
        from agents.git_manager import GitManager
        from agents.gitea import GiteaAPI
    git_manager = GitManager(project_id=project_id, project_dir=str(project_root))
    if not git_manager.is_git_available():
        return {'status': 'error', 'message': 'git executable is not available in PATH', 'project_id': project_id}
    if not git_manager.has_repo():
        return {'status': 'error', 'message': 'Local git repository is missing; cannot retry delivery safely.', 'project_id': project_id}
    commits = audit_data.get('commits', [])
    local_only_changes = audit_data.get('local_only_code_changes', [])
    orphan_changes = audit_data.get('orphan_code_changes', [])
    published_commits = [
        commit for commit in commits
        if commit.get('remote_status') == 'pushed' or commit.get('imported_from_remote') or commit.get('commit_url')
    ]
    # Pick the delivery branch by preference: change rows, commit rows, the
    # snapshot's active branch, then a deterministic per-project fallback.
    branch_candidates = [
        *(change.get('branch') for change in local_only_changes),
        *(change.get('branch') for change in orphan_changes),
        *(commit.get('branch') for commit in commits),
        ((snapshot_data.get('git') or {}).get('active_branch') if isinstance(snapshot_data.get('git'), dict) else None),
        f'ai/{project_id}',
    ]
    branch = self._dedupe_preserve_order(branch_candidates)[0]
    head = git_manager.current_head_or_none()
    if head is None:
        return {'status': 'error', 'message': 'Local repository has no commits; retry delivery cannot determine a safe base commit.', 'project_id': project_id}
    if git_manager.branch_exists(branch):
        git_manager.checkout_branch(branch)
    else:
        git_manager.checkout_branch(branch, create=True, start_point=head)
    code_change_ids = [change['id'] for change in local_only_changes] + [change['id'] for change in orphan_changes]
    orphan_ids = [change['id'] for change in orphan_changes]
    published_commit_hashes = [commit.get('commit_hash') for commit in published_commits if commit.get('commit_hash')]
    if orphan_changes:
        # Orphaned rows never made it into any commit: stage and commit the
        # generated files now, refusing if they vanished from the workspace.
        files_to_commit = self._dedupe_preserve_order([change.get('file_path') for change in orphan_changes])
        missing_files = [path for path in files_to_commit if not (project_root / path).exists()]
        if missing_files:
            return {
                'status': 'error',
                'message': f"Cannot retry delivery because generated files are missing locally: {', '.join(missing_files)}",
                'project_id': project_id,
            }
        git_manager.add_files(files_to_commit)
        if not git_manager.get_status():
            return {
                'status': 'error',
                'message': 'No local git changes remain for the orphaned files; purge them or regenerate the project.',
                'project_id': project_id,
            }
        commit_message = f"Retry AI delivery for prompt: {history.project_name}"
        retried_commit_hash = git_manager.commit(commit_message)
        # Attribute the retry commit to the most recent prompt among the orphans.
        prompt_id = max((change.get('prompt_id') for change in orphan_changes if change.get('prompt_id') is not None), default=None)
        self.log_commit(
            project_id=project_id,
            commit_message=commit_message,
            actor='dashboard',
            actor_type='operator',
            history_id=history.id,
            prompt_id=prompt_id,
            commit_hash=retried_commit_hash,
            changed_files=files_to_commit,
            branch=branch,
            remote_status='local-only',
        )
        published_commit_hashes.append(retried_commit_hash)
    gitea_api = GiteaAPI(token=settings.gitea_token, base_url=settings.gitea_url, owner=owner, repo=repo_name)
    user = gitea_api.get_current_user_sync()
    if user.get('error'):
        return {'status': 'error', 'message': f"Unable to authenticate with Gitea: {user.get('error')}", 'project_id': project_id}
    clone_url = repository.get('clone_url') or gitea_api.build_repo_git_url(owner=owner, repo=repo_name)
    if not clone_url:
        return {'status': 'error', 'message': 'Repository clone URL could not be determined for retry delivery.', 'project_id': project_id}
    try:
        git_manager.push_with_credentials(
            remote_url=clone_url,
            username=user.get('login') or 'git',
            password=settings.gitea_token,
            remote='origin',
            branch=branch,
        )
    except Exception as exc:
        self.log_system_event(component='git', level='ERROR', message=f'Retry delivery push failed for {project_id}: {exc}')
        return {'status': 'error', 'message': f'Remote git push failed: {exc}', 'project_id': project_id}
    if not published_commit_hashes:
        # Nothing was recorded as published; fall back to the current HEAD.
        head_commit = git_manager.current_head_or_none()
        if head_commit:
            published_commit_hashes.append(head_commit)
    prompt_text = (audit_data.get('prompts') or [{}])[0].get('prompt_text') if audit_data.get('prompts') else None
    try:
        pull_request = self._find_or_create_delivery_pull_request(history, gitea_api, owner, repo_name, branch, prompt_text)
    except Exception as exc:
        self.log_system_event(component='gitea', level='ERROR', message=f'Retry delivery PR creation failed for {project_id}: {exc}')
        return {'status': 'error', 'message': str(exc), 'project_id': project_id}
    self._update_project_audit_rows_for_delivery(
        project_id=project_id,
        branch=branch,
        owner=owner,
        repo_name=repo_name,
        code_change_ids=code_change_ids,
        orphan_code_change_ids=orphan_ids,
        published_commit_hashes=published_commit_hashes,
    )
    # Refresh the persisted UI snapshot so the dashboard reflects the push.
    refreshed_snapshot = dict(snapshot_data)
    refreshed_git = dict(refreshed_snapshot.get('git') or {})
    latest_commit_hash = self._dedupe_preserve_order(published_commit_hashes)[-1]
    latest_commit = dict(refreshed_git.get('latest_commit') or {})
    latest_commit.update(
        {
            'hash': latest_commit_hash,
            'scope': 'remote',
            'branch': branch,
            'commit_url': gitea_api.build_commit_url(latest_commit_hash, owner=owner, repo=repo_name),
        }
    )
    refreshed_git['latest_commit'] = latest_commit
    refreshed_git['active_branch'] = branch
    refreshed_git['remote_error'] = None
    refreshed_git['remote_push'] = {
        'status': 'pushed',
        'remote': clone_url,
        'branch': branch,
        'commit_url': latest_commit.get('commit_url'),
        'pull_request': pull_request,
    }
    refreshed_snapshot['git'] = refreshed_git
    refreshed_repository = dict(repository)
    refreshed_repository['last_commit_url'] = latest_commit.get('commit_url')
    refreshed_snapshot['repository'] = refreshed_repository
    refreshed_snapshot['pull_request'] = pull_request
    refreshed_snapshot['project_root'] = str(project_root)
    self.save_ui_snapshot(history.id, refreshed_snapshot)
    self._log_audit_trail(
        project_id=project_id,
        action='DELIVERY_RETRIED',
        actor='dashboard',
        action_type='RETRY',
        details=f'Retried remote delivery for branch {branch}',
        message='Remote delivery retried successfully',
        metadata_json={
            'history_id': history.id,
            'branch': branch,
            'commit_hashes': self._dedupe_preserve_order(published_commit_hashes),
            'pull_request': pull_request,
        },
    )
    self.log_system_event(component='git', level='INFO', message=f'Retried remote delivery for {project_id} on {branch}')
    return {
        'status': 'success',
        'message': 'Remote delivery retried successfully.',
        'project_id': project_id,
        'branch': branch,
        'commit_hashes': self._dedupe_preserve_order(published_commit_hashes),
        'pull_request': pull_request,
    }
def cleanup_orphan_code_changes(self, project_id: str | None = None) -> dict: def cleanup_orphan_code_changes(self, project_id: str | None = None) -> dict:
"""Delete code change rows that cannot be tied to any recorded commit.""" """Delete code change rows that cannot be tied to any recorded commit."""
change_query = self.db.query(AuditTrail).filter(AuditTrail.action == 'CODE_CHANGE') change_query = self.db.query(AuditTrail).filter(AuditTrail.action == 'CODE_CHANGE')
@@ -2493,7 +2879,7 @@ class DatabaseManager:
} }
for change in change_rows for change in change_rows
] ]
_, orphaned_changes = self._partition_code_changes(raw_code_changes, commits) _, _, orphaned_changes = self._partition_code_changes(raw_code_changes, commits)
orphan_ids = [change['id'] for change in orphaned_changes] orphan_ids = [change['id'] for change in orphaned_changes]
orphan_projects = sorted({change['project_id'] for change in orphaned_changes if change.get('project_id')}) orphan_projects = sorted({change['project_id'] for change in orphaned_changes if change.get('project_id')})

View File

@@ -230,6 +230,26 @@ class GiteaAPI:
} }
return await self._request("POST", f"repos/{_owner}/{_repo}/pulls", payload) return await self._request("POST", f"repos/{_owner}/{_repo}/pulls", payload)
def create_pull_request_sync(
    self,
    title: str,
    body: str,
    owner: str,
    repo: str,
    base: str = "main",
    head: str | None = None,
) -> dict:
    """Synchronously create a pull request.

    NOTE(review): when *head* is omitted, the fallback branch name uses
    Python's salted str hash(), so it is not stable across processes —
    confirm callers always supply an explicit head.
    """
    target_owner = owner or self.owner
    target_repo = repo or self.repo
    fallback_head = f"{target_owner}-{target_repo}-ai-gen-{hash(title) % 10000}"
    payload = {
        "title": title,
        "body": body,
        "base": base,
        "head": head or fallback_head,
    }
    return self._request_sync("POST", f"repos/{target_owner}/{target_repo}/pulls", payload)
async def list_pull_requests( async def list_pull_requests(
self, self,
owner: str | None = None, owner: str | None = None,
@@ -402,3 +422,13 @@ class GiteaAPI:
return {"error": "Repository name required for org operations"} return {"error": "Repository name required for org operations"}
return await self._request("GET", f"repos/{_owner}/{_repo}") return await self._request("GET", f"repos/{_owner}/{_repo}")
def get_repo_info_sync(self, owner: str | None = None, repo: str | None = None) -> dict:
    """Synchronously fetch repository information from the Gitea API."""
    resolved_owner = owner or self.owner
    resolved_repo = repo or self.repo
    if not resolved_repo:
        return {"error": "Repository name required for org operations"}
    return self._request_sync("GET", f"repos/{resolved_owner}/{resolved_repo}")

View File

@@ -3,9 +3,11 @@
from __future__ import annotations from __future__ import annotations
import difflib import difflib
import json
import py_compile import py_compile
import re import re
import subprocess import subprocess
from pathlib import PurePosixPath
from typing import Optional from typing import Optional
from datetime import datetime from datetime import datetime
@@ -14,18 +16,27 @@ try:
from .database_manager import DatabaseManager from .database_manager import DatabaseManager
from .git_manager import GitManager from .git_manager import GitManager
from .gitea import GiteaAPI from .gitea import GiteaAPI
from .llm_service import LLMServiceClient
from .ui_manager import UIManager from .ui_manager import UIManager
except ImportError: except ImportError:
from config import settings from config import settings
from agents.database_manager import DatabaseManager from agents.database_manager import DatabaseManager
from agents.git_manager import GitManager from agents.git_manager import GitManager
from agents.gitea import GiteaAPI from agents.gitea import GiteaAPI
from agents.llm_service import LLMServiceClient
from agents.ui_manager import UIManager from agents.ui_manager import UIManager
class AgentOrchestrator: class AgentOrchestrator:
"""Orchestrates the software generation process with full audit trail.""" """Orchestrates the software generation process with full audit trail."""
REMOTE_READY_REPOSITORY_MODES = {'project', 'onboarded'}
REMOTE_READY_REPOSITORY_STATUSES = {'created', 'exists', 'ready', 'onboarded'}
GENERATED_TEXT_FILE_SUFFIXES = {'.py', '.md', '.txt', '.toml', '.yaml', '.yml', '.json', '.ini', '.cfg', '.sh', '.html', '.css', '.js', '.ts'}
GENERATED_TEXT_FILE_NAMES = {'README', 'README.md', '.gitignore', 'requirements.txt', 'pyproject.toml', 'Dockerfile', 'Containerfile', 'Makefile'}
MAX_WORKSPACE_CONTEXT_FILES = 20
MAX_WORKSPACE_CONTEXT_CHARS = 24000
def __init__( def __init__(
self, self,
project_id: str, project_id: str,
@@ -77,6 +88,7 @@ class AgentOrchestrator:
self.branch_name = self._build_pr_branch_name(project_id) self.branch_name = self._build_pr_branch_name(project_id)
self.active_pull_request = None self.active_pull_request = None
self._gitea_username: str | None = None self._gitea_username: str | None = None
existing_repository: dict | None = None
hinted_issue_number = (related_issue_hint or {}).get('number') if related_issue_hint else None hinted_issue_number = (related_issue_hint or {}).get('number') if related_issue_hint else None
self.related_issue_number = hinted_issue_number if hinted_issue_number is not None else self._extract_issue_number(prompt_text) self.related_issue_number = hinted_issue_number if hinted_issue_number is not None else self._extract_issue_number(prompt_text)
self.related_issue: dict | None = DatabaseManager._normalize_issue(related_issue_hint) self.related_issue: dict | None = DatabaseManager._normalize_issue(related_issue_hint)
@@ -107,9 +119,12 @@ class AgentOrchestrator:
latest_ui = self.db_manager._get_latest_ui_snapshot_data(self.history.id) latest_ui = self.db_manager._get_latest_ui_snapshot_data(self.history.id)
repository = latest_ui.get('repository') if isinstance(latest_ui, dict) else None repository = latest_ui.get('repository') if isinstance(latest_ui, dict) else None
if isinstance(repository, dict) and repository: if isinstance(repository, dict) and repository:
existing_repository = dict(repository)
self.repo_owner = repository.get('owner') or self.repo_owner self.repo_owner = repository.get('owner') or self.repo_owner
self.repo_name = repository.get('name') or self.repo_name self.repo_name = repository.get('name') or self.repo_name
self.repo_url = repository.get('url') or self.repo_url self.repo_url = repository.get('url') or self.repo_url
git_state = latest_ui.get('git') if isinstance(latest_ui.get('git'), dict) else {}
self.branch_name = git_state.get('active_branch') or self.branch_name
if self.prompt_text: if self.prompt_text:
self.prompt_audit = self.db_manager.log_prompt_submission( self.prompt_audit = self.db_manager.log_prompt_submission(
history_id=self.history.id, history_id=self.history.id,
@@ -126,18 +141,60 @@ class AgentOrchestrator:
self.ui_manager.ui_data["project_root"] = str(self.project_root) self.ui_manager.ui_data["project_root"] = str(self.project_root)
self.ui_manager.ui_data["features"] = list(self.features) self.ui_manager.ui_data["features"] = list(self.features)
self.ui_manager.ui_data["tech_stack"] = list(self.tech_stack) self.ui_manager.ui_data["tech_stack"] = list(self.tech_stack)
self.ui_manager.ui_data["repository"] = { repository_ui = {
"owner": self.repo_owner, "owner": self.repo_owner,
"name": self.repo_name, "name": self.repo_name,
"mode": "project" if settings.use_project_repositories else "shared", "mode": "project" if settings.use_project_repositories else "shared",
"status": "pending" if settings.use_project_repositories else "shared", "status": "pending" if settings.use_project_repositories else "shared",
"provider": "gitea", "provider": "gitea",
} }
if existing_repository:
repository_ui.update(existing_repository)
self.ui_manager.ui_data["repository"] = repository_ui
if self.related_issue: if self.related_issue:
self.ui_manager.ui_data["related_issue"] = self.related_issue self.ui_manager.ui_data["related_issue"] = self.related_issue
if self.active_pull_request: if self.active_pull_request:
self.ui_manager.ui_data["pull_request"] = self.active_pull_request self.ui_manager.ui_data["pull_request"] = self.active_pull_request
def _repository_supports_remote_delivery(self, repository: dict | None = None) -> bool:
"""Return whether repository metadata supports git push and PR delivery."""
repo = repository or self.ui_manager.ui_data.get('repository') or {}
return repo.get('mode') in self.REMOTE_READY_REPOSITORY_MODES and repo.get('status') in self.REMOTE_READY_REPOSITORY_STATUSES
def _static_files(self) -> dict[str, str]:
"""Files that do not need prompt-specific generation."""
return {
".gitignore": "__pycache__/\n*.pyc\n.venv/\n.pytest_cache/\n.mypy_cache/\n",
}
def _fallback_generated_files(self) -> dict[str, str]:
"""Deterministic fallback files when LLM generation is unavailable."""
feature_section = "\n".join(f"- {feature}" for feature in self.features) or "- None specified"
tech_section = "\n".join(f"- {tech}" for tech in self.tech_stack) or "- Python"
return {
"README.md": (
f"# {self.project_name}\n\n"
f"{self.description}\n\n"
"## Features\n"
f"{feature_section}\n\n"
"## Tech Stack\n"
f"{tech_section}\n"
),
"requirements.txt": "fastapi\nuvicorn\npytest\n",
"main.py": (
"from fastapi import FastAPI\n\n"
"app = FastAPI(title=\"Generated App\")\n\n"
"@app.get('/')\n"
"def read_root():\n"
f" return {{'name': '{self.project_name}', 'status': 'generated', 'features': {self.features!r}}}\n"
),
"tests/test_app.py": (
"from main import read_root\n\n"
"def test_read_root():\n"
f" assert read_root()['name'] == '{self.project_name}'\n"
),
}
def _build_pr_branch_name(self, project_id: str) -> str: def _build_pr_branch_name(self, project_id: str) -> str:
"""Build a stable branch name used until the PR is merged.""" """Build a stable branch name used until the PR is merged."""
return f"ai/{project_id}" return f"ai/{project_id}"
@@ -158,7 +215,7 @@ class AgentOrchestrator:
"""Persist the current generation plan as an inspectable trace.""" """Persist the current generation plan as an inspectable trace."""
if not self.db_manager or not self.history or not self.prompt_audit: if not self.db_manager or not self.history or not self.prompt_audit:
return return
planned_files = list(self._template_files().keys()) planned_files = list(self._static_files().keys()) + list(self._fallback_generated_files().keys())
self.db_manager.log_llm_trace( self.db_manager.log_llm_trace(
project_id=self.project_id, project_id=self.project_id,
history_id=self.history.id, history_id=self.history.id,
@@ -188,6 +245,155 @@ class AgentOrchestrator:
fallback_used=False, fallback_used=False,
) )
def _is_safe_relative_path(self, path: str) -> bool:
"""Return whether a generated file path is safe to write under the project root."""
normalized = str(PurePosixPath((path or '').strip()))
if not normalized or normalized in {'.', '..'}:
return False
if normalized.startswith('/') or normalized.startswith('../') or '/../' in normalized:
return False
if normalized.startswith('.git/'):
return False
return True
def _is_supported_generated_text_file(self, path: str) -> bool:
"""Return whether the generated path is a supported text artifact."""
normalized = PurePosixPath(path)
if normalized.name in self.GENERATED_TEXT_FILE_NAMES:
return True
return normalized.suffix.lower() in self.GENERATED_TEXT_FILE_SUFFIXES
def _collect_workspace_context(self) -> dict:
"""Collect a compact, text-only snapshot of the current project workspace."""
if not self.project_root.exists():
return {'has_existing_files': False, 'files': []}
files: list[dict] = []
total_chars = 0
for path in sorted(self.project_root.rglob('*')):
if not path.is_file():
continue
relative_path = path.relative_to(self.project_root).as_posix()
if relative_path == '.gitignore':
continue
if not self._is_safe_relative_path(relative_path) or not self._is_supported_generated_text_file(relative_path):
continue
try:
content = path.read_text(encoding='utf-8')
except (UnicodeDecodeError, OSError):
continue
remaining_chars = self.MAX_WORKSPACE_CONTEXT_CHARS - total_chars
if remaining_chars <= 0:
break
snippet = content[:remaining_chars]
files.append(
{
'path': relative_path,
'content': snippet,
'truncated': len(snippet) < len(content),
}
)
total_chars += len(snippet)
if len(files) >= self.MAX_WORKSPACE_CONTEXT_FILES:
break
return {'has_existing_files': bool(files), 'files': files}
def _parse_generated_files(self, content: str | None) -> dict[str, str]:
"""Parse an LLM file bundle response into relative-path/content pairs."""
if not content:
return {}
try:
parsed = json.loads(content)
except Exception:
return {}
generated: dict[str, str] = {}
if isinstance(parsed, dict) and isinstance(parsed.get('files'), list):
for item in parsed['files']:
if not isinstance(item, dict):
continue
path = str(item.get('path') or '').strip()
file_content = item.get('content')
if (
self._is_safe_relative_path(path)
and self._is_supported_generated_text_file(path)
and isinstance(file_content, str)
and file_content.strip()
):
generated[path] = file_content.rstrip() + "\n"
elif isinstance(parsed, dict):
for path, file_content in parsed.items():
normalized_path = str(path).strip()
if (
self._is_safe_relative_path(normalized_path)
and self._is_supported_generated_text_file(normalized_path)
and isinstance(file_content, str)
and file_content.strip()
):
generated[normalized_path] = file_content.rstrip() + "\n"
return generated
async def _generate_prompt_driven_files(self) -> tuple[dict[str, str], dict | None, bool]:
"""Use the configured LLM to generate prompt-specific project files."""
fallback_files = self._fallback_generated_files()
workspace_context = self._collect_workspace_context()
has_existing_files = bool(workspace_context.get('has_existing_files'))
if has_existing_files:
system_prompt = (
'You modify an existing software repository. '
'Return only JSON. Update the smallest necessary set of files to satisfy the new prompt. '
'Prefer editing existing files over inventing a new starter app. '
'Only return files that should be written. Omit unchanged files. '
'Use repository-relative paths and do not wrap the JSON in markdown fences.'
)
user_prompt = (
f"Project name: {self.project_name}\n"
f"Description: {self.description}\n"
f"Original prompt: {self.prompt_text or self.description}\n"
f"Requested features: {json.dumps(self.features)}\n"
f"Preferred tech stack: {json.dumps(self.tech_stack)}\n"
f"Related issue: {json.dumps(self.related_issue) if self.related_issue else 'null'}\n\n"
f"Current workspace snapshot:\n{json.dumps(workspace_context['files'], indent=2)}\n\n"
'Return JSON shaped as {"files": [{"path": "relative/path.py", "content": "..."}, ...]}. '
'Each file path must be relative to the repository root.'
)
else:
system_prompt = (
'You generate small but concrete starter projects. '
'Return only JSON. Provide production-like but compact code that directly reflects the user request. '
'Include the files README.md, requirements.txt, main.py, and tests/test_app.py. '
'Use FastAPI for Python web requests unless the prompt clearly demands something else. '
'The test must verify a real behavior from main.py. '
'Do not wrap the JSON in markdown fences.'
)
user_prompt = (
f"Project name: {self.project_name}\n"
f"Description: {self.description}\n"
f"Original prompt: {self.prompt_text or self.description}\n"
f"Requested features: {json.dumps(self.features)}\n"
f"Preferred tech stack: {json.dumps(self.tech_stack)}\n"
f"Related issue: {json.dumps(self.related_issue) if self.related_issue else 'null'}\n\n"
'Return JSON shaped as {"files": [{"path": "README.md", "content": "..."}, ...]}. '
'At minimum include README.md, requirements.txt, main.py, and tests/test_app.py.'
)
content, trace = await LLMServiceClient().chat_with_trace(
stage='generation_plan',
system_prompt=system_prompt,
user_prompt=user_prompt,
tool_context_input={
'project_id': self.project_id,
'project_name': self.project_name,
'repository': self.ui_manager.ui_data.get('repository'),
'related_issue': self.related_issue,
'workspace_files': workspace_context.get('files', []),
},
expect_json=True,
)
generated_files = self._parse_generated_files(content)
if has_existing_files:
return generated_files, trace, True
merged_files = {**fallback_files, **generated_files}
return merged_files, trace, False
async def _sync_issue_context(self) -> None: async def _sync_issue_context(self) -> None:
"""Sync repository issues and resolve a linked issue from the prompt when present.""" """Sync repository issues and resolve a linked issue from the prompt when present."""
if not self.db_manager or not self.history: if not self.db_manager or not self.history:
@@ -212,6 +418,14 @@ class AgentOrchestrator:
self.db_manager.attach_issue_to_prompt(self.prompt_audit.id, self.related_issue) self.db_manager.attach_issue_to_prompt(self.prompt_audit.id, self.related_issue)
async def _ensure_remote_repository(self) -> None: async def _ensure_remote_repository(self) -> None:
repository = self.ui_manager.ui_data.get("repository") or {}
if self._repository_supports_remote_delivery(repository):
repository.setdefault("provider", "gitea")
repository.setdefault("status", "ready")
if repository.get("url"):
self.repo_url = repository.get("url")
self.ui_manager.ui_data["repository"] = repository
return
if not settings.use_project_repositories: if not settings.use_project_repositories:
self.ui_manager.ui_data["repository"]["status"] = "shared" self.ui_manager.ui_data["repository"]["status"] = "shared"
if settings.gitea_repo: if settings.gitea_repo:
@@ -303,9 +517,7 @@ class AgentOrchestrator:
async def _push_branch(self, branch: str) -> dict | None: async def _push_branch(self, branch: str) -> dict | None:
"""Push a branch to the configured project repository when available.""" """Push a branch to the configured project repository when available."""
repository = self.ui_manager.ui_data.get('repository') or {} repository = self.ui_manager.ui_data.get('repository') or {}
if repository.get('mode') != 'project': if not self._repository_supports_remote_delivery(repository):
return None
if repository.get('status') not in {'created', 'exists', 'ready'}:
return None return None
if not settings.gitea_token or not self.repo_owner or not self.repo_name: if not settings.gitea_token or not self.repo_owner or not self.repo_name:
return None return None
@@ -352,7 +564,7 @@ class AgentOrchestrator:
self.ui_manager.ui_data['pull_request'] = self.active_pull_request self.ui_manager.ui_data['pull_request'] = self.active_pull_request
return self.active_pull_request return self.active_pull_request
repository = self.ui_manager.ui_data.get('repository') or {} repository = self.ui_manager.ui_data.get('repository') or {}
if repository.get('mode') != 'project' or repository.get('status') not in {'created', 'exists', 'ready'}: if not self._repository_supports_remote_delivery(repository):
return None return None
title = f"AI delivery for {self.project_name}" title = f"AI delivery for {self.project_name}"
@@ -393,9 +605,7 @@ class AgentOrchestrator:
async def _push_remote_commit(self, commit_hash: str, commit_message: str, changed_files: list[str], base_commit: str | None) -> dict | None: async def _push_remote_commit(self, commit_hash: str, commit_message: str, changed_files: list[str], base_commit: str | None) -> dict | None:
"""Push the local commit to the provisioned Gitea repository and build browser links.""" """Push the local commit to the provisioned Gitea repository and build browser links."""
repository = self.ui_manager.ui_data.get("repository") or {} repository = self.ui_manager.ui_data.get("repository") or {}
if repository.get("mode") != "project": if not self._repository_supports_remote_delivery(repository):
return None
if repository.get("status") not in {"created", "exists", "ready"}:
return None return None
push_result = await self._push_branch(self.branch_name) push_result = await self._push_branch(self.branch_name)
if push_result is None: if push_result is None:
@@ -455,6 +665,8 @@ class AgentOrchestrator:
target.parent.mkdir(parents=True, exist_ok=True) target.parent.mkdir(parents=True, exist_ok=True)
change_type = "UPDATE" if target.exists() else "CREATE" change_type = "UPDATE" if target.exists() else "CREATE"
previous_content = target.read_text(encoding="utf-8") if target.exists() else "" previous_content = target.read_text(encoding="utf-8") if target.exists() else ""
if previous_content == content:
return
diff_text = self._build_diff_text(relative_path, previous_content, content) diff_text = self._build_diff_text(relative_path, previous_content, content)
target.write_text(content, encoding="utf-8") target.write_text(content, encoding="utf-8")
self.changed_files.append(relative_path) self.changed_files.append(relative_path)
@@ -468,34 +680,6 @@ class AgentOrchestrator:
} }
) )
def _template_files(self) -> dict[str, str]:
feature_section = "\n".join(f"- {feature}" for feature in self.features) or "- None specified"
tech_section = "\n".join(f"- {tech}" for tech in self.tech_stack) or "- Python"
return {
".gitignore": "__pycache__/\n*.pyc\n.venv/\n.pytest_cache/\n.mypy_cache/\n",
"README.md": (
f"# {self.project_name}\n\n"
f"{self.description}\n\n"
"## Features\n"
f"{feature_section}\n\n"
"## Tech Stack\n"
f"{tech_section}\n"
),
"requirements.txt": "fastapi\nuvicorn\npytest\n",
"main.py": (
"from fastapi import FastAPI\n\n"
"app = FastAPI(title=\"Generated App\")\n\n"
"@app.get('/')\n"
"def read_root():\n"
f" return {{'name': '{self.project_name}', 'status': 'generated', 'features': {self.features!r}}}\n"
),
"tests/test_app.py": (
"from main import read_root\n\n"
"def test_read_root():\n"
f" assert read_root()['name'] == '{self.project_name}'\n"
),
}
async def run(self) -> dict: async def run(self) -> dict:
"""Run the software generation process with full audit logging.""" """Run the software generation process with full audit logging."""
try: try:
@@ -585,18 +769,37 @@ class AgentOrchestrator:
async def _create_project_structure(self) -> None: async def _create_project_structure(self) -> None:
"""Create initial project structure.""" """Create initial project structure."""
self.project_root.mkdir(parents=True, exist_ok=True) self.project_root.mkdir(parents=True, exist_ok=True)
for relative_path, content in self._template_files().items(): for relative_path, content in self._static_files().items():
if relative_path.startswith("main.py") or relative_path.startswith("tests/"):
continue
self._write_file(relative_path, content) self._write_file(relative_path, content)
self._append_log(f"Project structure created under {self.project_root}.") self._append_log(f"Project structure created under {self.project_root}.")
async def _generate_code(self) -> None: async def _generate_code(self) -> None:
"""Generate code using Ollama.""" """Generate code using Ollama."""
for relative_path, content in self._template_files().items(): change_count_before = len(self.pending_code_changes)
if relative_path in {"main.py", "tests/test_app.py"}: generated_files, trace, editing_existing_workspace = await self._generate_prompt_driven_files()
self._write_file(relative_path, content) for relative_path, content in generated_files.items():
self._append_log("Application entrypoint and smoke test generated.") self._write_file(relative_path, content)
if editing_existing_workspace and len(self.pending_code_changes) == change_count_before:
raise RuntimeError('The LLM response did not produce any file changes for the existing project.')
fallback_used = bool(trace and trace.get('fallback_used')) or trace is None
if self.db_manager and self.history and self.prompt_audit and trace:
self.db_manager.log_llm_trace(
project_id=self.project_id,
history_id=self.history.id,
prompt_id=self.prompt_audit.id,
stage='code_generation',
provider=trace.get('provider', 'ollama'),
model=trace.get('model', settings.OLLAMA_MODEL),
system_prompt=trace.get('system_prompt', ''),
user_prompt=trace.get('user_prompt', self.prompt_text or self.description),
assistant_response=trace.get('assistant_response', ''),
raw_response=trace.get('raw_response'),
fallback_used=fallback_used,
)
if fallback_used:
self._append_log('LLM code generation was unavailable; used deterministic scaffolding fallback.')
else:
self._append_log('Application files generated from the prompt with the configured LLM.')
async def _run_tests(self) -> None: async def _run_tests(self) -> None:
"""Run tests for the generated code.""" """Run tests for the generated code."""

View File

@@ -96,6 +96,8 @@ class RequestInterpreter:
parsed = json.loads(content) parsed = json.loads(content)
interpreted = self._normalize_interpreted_request(parsed, normalized) interpreted = self._normalize_interpreted_request(parsed, normalized)
routing = self._normalize_routing(parsed.get('routing'), interpreted, compact_context) routing = self._normalize_routing(parsed.get('routing'), interpreted, compact_context)
if routing.get('intent') == 'continue_project' and routing.get('project_name'):
interpreted['name'] = routing['project_name']
naming_trace = None naming_trace = None
if routing.get('intent') == 'new_project': if routing.get('intent') == 'new_project':
interpreted, routing, naming_trace = await self._refine_new_project_identity( interpreted, routing, naming_trace = await self._refine_new_project_identity(
@@ -265,6 +267,14 @@ class RequestInterpreter:
matched_project = project matched_project = project
break break
intent = str(routing.get('intent') or '').strip() or ('continue_project' if matched_project else 'new_project') intent = str(routing.get('intent') or '').strip() or ('continue_project' if matched_project else 'new_project')
if matched_project is None and intent == 'continue_project':
recent_chat_history = context.get('recent_chat_history', [])
recent_project_id = recent_chat_history[0].get('project_id') if recent_chat_history else None
if recent_project_id:
matched_project = next(
(project for project in context.get('projects', []) if project.get('project_id') == recent_project_id),
None,
)
normalized = { normalized = {
'intent': intent, 'intent': intent,
'project_id': matched_project.get('project_id') if matched_project else project_id, 'project_id': matched_project.get('project_id') if matched_project else project_id,

View File

@@ -1194,6 +1194,16 @@ def create_dashboard():
ui.notify(result.get('message', 'Audit cleanup completed'), color='positive') ui.notify(result.get('message', 'Audit cleanup completed'), color='positive')
_refresh_all_dashboard_sections() _refresh_all_dashboard_sections()
def retry_project_delivery_action(project_id: str) -> None:
db = get_db_sync()
if db is None:
ui.notify('Database session could not be created', color='negative')
return
with closing(db):
result = DatabaseManager(db).retry_project_delivery(project_id)
ui.notify(result.get('message', 'Delivery retry completed'), color='positive' if result.get('status') == 'success' else 'negative')
_refresh_all_dashboard_sections()
def save_llm_prompt_action(prompt_key: str) -> None: def save_llm_prompt_action(prompt_key: str) -> None:
db = get_db_sync() db = get_db_sync()
if db is None: if db is None:
@@ -1472,11 +1482,15 @@ def create_dashboard():
with ui.row().classes('justify-between items-center'): with ui.row().classes('justify-between items-center'):
ui.label(project['project_name']).style('font-weight: 700; color: #2f241d;') ui.label(project['project_name']).style('font-weight: 700; color: #2f241d;')
with ui.row().classes('items-center gap-2'): with ui.row().classes('items-center gap-2'):
if project.get('delivery_status') == 'uncommitted': if project.get('delivery_status') in {'uncommitted', 'local_only', 'pushed_no_pr'}:
ui.label('uncommitted delivery').classes('factory-chip') ui.label(project.get('delivery_status', 'delivery')).classes('factory-chip')
ui.label(project['status']).classes('factory-chip') ui.label(project['status']).classes('factory-chip')
ui.linear_progress(value=(project['progress'] or 0) / 100, show_value=False).classes('w-full') ui.linear_progress(value=(project['progress'] or 0) / 100, show_value=False).classes('w-full')
ui.label(project.get('delivery_message') if project.get('delivery_status') == 'uncommitted' else project['message'] or 'No status message').classes('factory-muted') ui.label(
project.get('delivery_message')
if project.get('delivery_status') in {'uncommitted', 'local_only', 'pushed_no_pr'}
else project['message'] or 'No status message'
).classes('factory-muted')
else: else:
ui.label('No projects in the database yet.').classes('factory-muted') ui.label('No projects in the database yet.').classes('factory-muted')
@@ -1532,22 +1546,28 @@ def create_dashboard():
lambda: delete_project_action(project_id), lambda: delete_project_action(project_id),
), ),
).props('outline color=negative') ).props('outline color=negative')
if project.get('delivery_status') == 'uncommitted': if project.get('delivery_status') in {'uncommitted', 'local_only', 'pushed_no_pr'}:
with ui.card().classes('q-ma-md q-pa-md').style('background: #fff4dd; border: 1px solid #e0b36a;'): with ui.card().classes('q-ma-md q-pa-md').style('background: #fff4dd; border: 1px solid #e0b36a;'):
with ui.row().classes('items-center justify-between w-full gap-3'): with ui.row().classes('items-center justify-between w-full gap-3'):
with ui.column().classes('gap-1'): with ui.column().classes('gap-1'):
ui.label('Uncommitted delivery detected').style('font-weight: 700; color: #7a4b16;') ui.label('Remote delivery attention needed').style('font-weight: 700; color: #7a4b16;')
ui.label(project.get('delivery_message') or 'Generated changes were recorded without a matching commit.').classes('factory-muted') ui.label(project.get('delivery_message') or 'Generated changes were not published to the tracked repository.').classes('factory-muted')
ui.button( with ui.row().classes('items-center gap-2'):
'Purge project orphan rows', ui.button(
on_click=lambda _=None, project_id=project['project_id']: _render_confirmation_dialog( 'Retry delivery',
'Purge orphaned generated change rows for this project?', on_click=lambda _=None, project_id=project['project_id']: retry_project_delivery_action(project_id),
'Delete only generated CODE_CHANGE audit rows for this project that have no matching git commit. Valid history remains intact.', ).props('outline color=positive')
'Purge Project Orphans', if project.get('delivery_status') == 'uncommitted':
lambda: purge_orphan_code_changes_action(project_id), ui.button(
color='warning', 'Purge project orphan rows',
), on_click=lambda _=None, project_id=project['project_id']: _render_confirmation_dialog(
).props('outline color=warning') 'Purge orphaned generated change rows for this project?',
'Delete only generated CODE_CHANGE audit rows for this project that have no matching git commit. Valid history remains intact.',
'Purge Project Orphans',
lambda: purge_orphan_code_changes_action(project_id),
color='warning',
),
).props('outline color=warning')
with ui.grid(columns=2).classes('w-full gap-4 q-pa-md'): with ui.grid(columns=2).classes('w-full gap-4 q-pa-md'):
with ui.card().classes('q-pa-md'): with ui.card().classes('q-pa-md'):
ui.label('Repository').style('font-weight: 700; color: #3a281a;') ui.label('Repository').style('font-weight: 700; color: #3a281a;')
@@ -1598,20 +1618,26 @@ def create_dashboard():
lambda: delete_project_action(project_id), lambda: delete_project_action(project_id),
), ),
).props('outline color=negative') ).props('outline color=negative')
if project.get('delivery_status') == 'uncommitted': if project.get('delivery_status') in {'uncommitted', 'local_only', 'pushed_no_pr'}:
with ui.card().classes('q-ma-md q-pa-md').style('background: #fff4dd; border: 1px solid #e0b36a;'): with ui.card().classes('q-ma-md q-pa-md').style('background: #fff4dd; border: 1px solid #e0b36a;'):
ui.label('Archived project contains uncommitted generated change rows').style('font-weight: 700; color: #7a4b16;') ui.label('Archived project needs delivery attention').style('font-weight: 700; color: #7a4b16;')
ui.label(project.get('delivery_message') or 'Generated changes were recorded without a matching commit.').classes('factory-muted') ui.label(project.get('delivery_message') or 'Generated changes were not published to the tracked repository.').classes('factory-muted')
ui.button( with ui.row().classes('items-center gap-2 q-mt-sm'):
'Purge archived project orphan rows', ui.button(
on_click=lambda _=None, project_id=project['project_id']: _render_confirmation_dialog( 'Retry delivery',
'Purge orphaned generated change rows for this archived project?', on_click=lambda _=None, project_id=project['project_id']: retry_project_delivery_action(project_id),
'Delete only generated CODE_CHANGE audit rows for this project that have no matching git commit. Valid history remains intact.', ).props('outline color=positive')
'Purge Archived Orphans', if project.get('delivery_status') == 'uncommitted':
lambda: purge_orphan_code_changes_action(project_id), ui.button(
color='warning', 'Purge archived project orphan rows',
), on_click=lambda _=None, project_id=project['project_id']: _render_confirmation_dialog(
).props('outline color=warning').classes('q-mt-sm') 'Purge orphaned generated change rows for this archived project?',
'Delete only generated CODE_CHANGE audit rows for this project that have no matching git commit. Valid history remains intact.',
'Purge Archived Orphans',
lambda: purge_orphan_code_changes_action(project_id),
color='warning',
),
).props('outline color=warning')
with ui.grid(columns=2).classes('w-full gap-4 q-pa-md'): with ui.grid(columns=2).classes('w-full gap-4 q-pa-md'):
with ui.card().classes('q-pa-md'): with ui.card().classes('q-pa-md'):
ui.label('Repository').style('font-weight: 700; color: #3a281a;') ui.label('Repository').style('font-weight: 700; color: #3a281a;')

View File

@@ -6,7 +6,7 @@ from urllib.parse import urlparse
from alembic import command from alembic import command
from alembic.config import Config from alembic.config import Config
from sqlalchemy import create_engine, event, text from sqlalchemy import create_engine, text
from sqlalchemy.engine import Engine from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session, sessionmaker from sqlalchemy.orm import Session, sessionmaker
@@ -64,20 +64,6 @@ def get_engine() -> Engine:
pool_timeout=settings.DB_POOL_TIMEOUT or 30 pool_timeout=settings.DB_POOL_TIMEOUT or 30
) )
# Event listener for connection checkout (PostgreSQL only)
if not settings.use_sqlite:
@event.listens_for(engine, "checkout")
def receive_checkout(dbapi_connection, connection_record, connection_proxy):
"""Log connection checkout for audit purposes."""
if settings.LOG_LEVEL in ("DEBUG", "INFO"):
print(f"DB Connection checked out from pool")
@event.listens_for(engine, "checkin")
def receive_checkin(dbapi_connection, connection_record):
"""Log connection checkin for audit purposes."""
if settings.LOG_LEVEL == "DEBUG":
print(f"DB Connection returned to pool")
return engine return engine

View File

@@ -306,7 +306,7 @@ async def _run_generation(
resolved_prompt_text = prompt_text or _compose_prompt_text(request) resolved_prompt_text = prompt_text or _compose_prompt_text(request)
if preferred_project_id and reusable_history is not None: if preferred_project_id and reusable_history is not None:
project_id = reusable_history.project_id project_id = reusable_history.project_id
elif reusable_history and not is_explicit_new_project and manager.get_open_pull_request(project_id=reusable_history.project_id): elif reusable_history and not is_explicit_new_project:
project_id = reusable_history.project_id project_id = reusable_history.project_id
else: else:
if is_explicit_new_project or prompt_text: if is_explicit_new_project or prompt_text:
@@ -404,8 +404,18 @@ async def _run_generation(
fallback_used=summary_trace.get('fallback_used', False), fallback_used=summary_trace.get('fallback_used', False),
) )
response_data['summary_message'] = summary_message response_data['summary_message'] = summary_message
response_data['summary_metadata'] = {
'provider': summary_trace.get('provider'),
'model': summary_trace.get('model'),
'fallback_used': bool(summary_trace.get('fallback_used')),
}
response_data['pull_request'] = result.get('pull_request') or manager.get_open_pull_request(project_id=project_id) response_data['pull_request'] = result.get('pull_request') or manager.get_open_pull_request(project_id=project_id)
return {'status': result['status'], 'data': response_data, 'summary_message': summary_message} return {
'status': result['status'],
'data': response_data,
'summary_message': summary_message,
'summary_metadata': response_data['summary_metadata'],
}
def _project_root(project_id: str) -> Path: def _project_root(project_id: str) -> Path: