Compare commits
31 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| ed8dc48280 | |||
| c3cf8da42d | |||
| e495775b91 | |||
| 356c388efb | |||
| fd812476cc | |||
| 032139c14f | |||
| 194d5658a6 | |||
| b9faac8d16 | |||
| 80d7716e65 | |||
| 321bf74aef | |||
| 55ee75106c | |||
| b2829caa02 | |||
| d4b280cf75 | |||
| 806db8537b | |||
| 360ed5c6f3 | |||
| 4b9eb2f359 | |||
| ebfcfb969a | |||
| 56b05eb686 | |||
| 59a7e9787e | |||
| a357a307a7 | |||
| af4247e657 | |||
| 227ad1ad6f | |||
| 82e53a6651 | |||
| e9dc1ede55 | |||
| 6ee1c46826 | |||
| 4f5c87bed9 | |||
| 7180031d1f | |||
| de4feb61cd | |||
| ddb9f2100b | |||
| 034bb3eb63 | |||
| 06a50880b7 |
@@ -4,6 +4,7 @@ permissions:
|
|||||||
|
|
||||||
env:
|
env:
|
||||||
SKIP_MAKE_SETUP_CHECK: 'true'
|
SKIP_MAKE_SETUP_CHECK: 'true'
|
||||||
|
DOCKER_API_VERSION: '1.43'
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -49,11 +50,15 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
- name: Login to Gitea container registry
|
- name: Login to Gitea container registry
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
|
env:
|
||||||
|
DOCKER_API_VERSION: ${{ env.DOCKER_API_VERSION }}
|
||||||
with:
|
with:
|
||||||
username: gitearobot
|
username: gitearobot
|
||||||
password: ${{ secrets.PACKAGE_GITEA_PAT }}
|
password: ${{ secrets.PACKAGE_GITEA_PAT }}
|
||||||
registry: git.disi.dev
|
registry: git.disi.dev
|
||||||
- name: Build and publish
|
- name: Build and publish
|
||||||
|
env:
|
||||||
|
DOCKER_API_VERSION: ${{ env.DOCKER_API_VERSION }}
|
||||||
run: |
|
run: |
|
||||||
REPOSITORY_OWNER=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $1}' | tr '[:upper:]' '[:lower:]')
|
REPOSITORY_OWNER=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $1}' | tr '[:upper:]' '[:lower:]')
|
||||||
REPOSITORY_NAME=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $2}' | tr '-' '_')
|
REPOSITORY_NAME=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $2}' | tr '-' '_')
|
||||||
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
sqlite.db
|
||||||
|
.nicegui/
|
||||||
@@ -40,4 +40,4 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
|
|||||||
CMD curl -f http://localhost:8000/health || exit 1
|
CMD curl -f http://localhost:8000/health || exit 1
|
||||||
|
|
||||||
# Run application
|
# Run application
|
||||||
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]"]
|
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
|
||||||
|
|||||||
127
HISTORY.md
127
HISTORY.md
@@ -5,10 +5,137 @@ Changelog
|
|||||||
(unreleased)
|
(unreleased)
|
||||||
------------
|
------------
|
||||||
|
|
||||||
|
Fix
|
||||||
|
~~~
|
||||||
|
- Add additional deletion confirmation, refs NOISSUE. [Simon
|
||||||
|
Diesenreiter]
|
||||||
|
|
||||||
|
|
||||||
|
0.7.0 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
- Feat: gitea issue integration, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
- Feat: better history data, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
|
||||||
|
0.6.5 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Fix
|
||||||
|
~~~
|
||||||
|
- Better n8n workflow, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
Other
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
|
||||||
|
0.6.4 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Fix
|
||||||
|
~~~
|
||||||
|
- Add Telegram helper functions, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
Other
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
|
||||||
|
0.6.3 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Fix
|
||||||
|
~~~
|
||||||
|
- N8n workflow generation, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
Other
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
|
||||||
|
0.6.2 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Fix
|
||||||
|
~~~
|
||||||
|
- Fix Quasar layout issues, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
Other
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
|
||||||
|
0.6.1 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Fix
|
||||||
|
~~~
|
||||||
|
- Fix commit for version push, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
Other
|
||||||
|
~~~~~
|
||||||
|
- Chore: add more health info for n8n, refs NOISSUE. [Simon
|
||||||
|
Diesenreiter]
|
||||||
|
|
||||||
|
|
||||||
|
0.6.0 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
- Feat(api): expose database target in health refs NOISSUE. [Simon
|
||||||
|
Diesenreiter]
|
||||||
|
- Fix(db): prefer postgres config in production refs NOISSUE. [Simon
|
||||||
|
Diesenreiter]
|
||||||
|
|
||||||
|
|
||||||
|
0.5.0 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
- Feat(dashboard): expose repository urls refs NOISSUE. [Simon
|
||||||
|
Diesenreiter]
|
||||||
|
- Feat(factory): serve dashboard at root and create project repos refs
|
||||||
|
NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
|
||||||
|
0.4.1 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
- Fix(ci): pin docker api version for release builds refs NOISSUE.
|
||||||
|
[Simon Diesenreiter]
|
||||||
|
|
||||||
|
|
||||||
|
0.4.0 (2026-04-10)
|
||||||
|
------------------
|
||||||
|
- Chore(git): ignore local sqlite database refs NOISSUE. [Simon
|
||||||
|
Diesenreiter]
|
||||||
|
- Feat(factory): implement db-backed dashboard and workflow automation
|
||||||
|
refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
|
||||||
|
0.3.6 (2026-04-04)
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Fix
|
||||||
|
~~~
|
||||||
|
- Rename gitea workflow, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
Other
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
|
||||||
|
0.3.5 (2026-04-04)
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Fix
|
||||||
|
~~~
|
||||||
|
- Some cleanup, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
Other
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
|
||||||
|
0.3.4 (2026-04-04)
|
||||||
|
------------------
|
||||||
|
|
||||||
Fix
|
Fix
|
||||||
~~~
|
~~~
|
||||||
- Fix database init, refs NOISSUE. [Simon Diesenreiter]
|
- Fix database init, refs NOISSUE. [Simon Diesenreiter]
|
||||||
|
|
||||||
|
Other
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
|
||||||
0.3.3 (2026-04-04)
|
0.3.3 (2026-04-04)
|
||||||
------------------
|
------------------
|
||||||
|
|||||||
4
Makefile
4
Makefile
@@ -1,5 +1,7 @@
|
|||||||
.ONESHELL:
|
.ONESHELL:
|
||||||
|
|
||||||
|
DOCKER_API_VERSION ?= 1.43
|
||||||
|
|
||||||
.PHONY: issetup
|
.PHONY: issetup
|
||||||
issetup:
|
issetup:
|
||||||
@[ -f .git/hooks/commit-msg ] || [ -z ${SKIP_MAKE_SETUP_CHECK+x} ] || (echo "You must run 'make setup' first to initialize the repo!" && exit 1)
|
@[ -f .git/hooks/commit-msg ] || [ -z ${SKIP_MAKE_SETUP_CHECK+x} ] || (echo "You must run 'make setup' first to initialize the repo!" && exit 1)
|
||||||
@@ -42,7 +44,7 @@ release: issetup ## Create a new tag for release.
|
|||||||
|
|
||||||
.PHONY: build
|
.PHONY: build
|
||||||
build: issetup ## Create a new tag for release.
|
build: issetup ## Create a new tag for release.
|
||||||
@docker build -t ai-software-factory:$(cat ai-software-factory/VERSION) -f Containerfile .
|
@DOCKER_API_VERSION=$(DOCKER_API_VERSION) docker build -t ai-software-factory:$(cat ai_software_factory/VERSION) -f Containerfile .
|
||||||
|
|
||||||
# This project has been generated from rochacbruno/python-project-template
|
# This project has been generated from rochacbruno/python-project-template
|
||||||
#igest__ = 'rochacbruno'
|
#igest__ = 'rochacbruno'
|
||||||
|
|||||||
35
README.md
35
README.md
@@ -6,7 +6,7 @@ Automated software generation service powered by Ollama LLM. This service allows
|
|||||||
|
|
||||||
- **Telegram Integration**: Receive software requests via Telegram bot
|
- **Telegram Integration**: Receive software requests via Telegram bot
|
||||||
- **Ollama LLM**: Uses Ollama-hosted models for code generation
|
- **Ollama LLM**: Uses Ollama-hosted models for code generation
|
||||||
- **Git Integration**: Automatically commits code to gitea
|
- **Git Integration**: Creates a dedicated Gitea repository per generated project inside your organization
|
||||||
- **Pull Requests**: Creates PRs for user review before merging
|
- **Pull Requests**: Creates PRs for user review before merging
|
||||||
- **Web UI**: Beautiful dashboard for monitoring project progress
|
- **Web UI**: Beautiful dashboard for monitoring project progress
|
||||||
- **n8n Workflows**: Bridges Telegram with LLMs via n8n webhooks
|
- **n8n Workflows**: Bridges Telegram with LLMs via n8n webhooks
|
||||||
@@ -49,9 +49,19 @@ OLLAMA_MODEL=llama3
|
|||||||
|
|
||||||
# Gitea
|
# Gitea
|
||||||
GITEA_URL=https://gitea.yourserver.com
|
GITEA_URL=https://gitea.yourserver.com
|
||||||
GITEA_TOKEN= analyze your_gitea_api_token
|
GITEA_TOKEN=your_gitea_api_token
|
||||||
GITEA_OWNER=ai-software-factory
|
GITEA_OWNER=ai-software-factory
|
||||||
GITEA_REPO=ai-software-factory
|
# Optional legacy fixed-repository mode. Leave empty to create one repo per project.
|
||||||
|
GITEA_REPO=
|
||||||
|
|
||||||
|
# Database
|
||||||
|
# In production, provide PostgreSQL settings. They take precedence over the SQLite default.
|
||||||
|
# Setting USE_SQLITE=false is still supported if you want to make the choice explicit.
|
||||||
|
POSTGRES_HOST=postgres.yourserver.com
|
||||||
|
POSTGRES_PORT=5432
|
||||||
|
POSTGRES_USER=ai_software_factory
|
||||||
|
POSTGRES_PASSWORD=change-me
|
||||||
|
POSTGRES_DB=ai_software_factory
|
||||||
|
|
||||||
# n8n
|
# n8n
|
||||||
N8N_WEBHOOK_URL=http://n8n.yourserver.com/webhook/telegram
|
N8N_WEBHOOK_URL=http://n8n.yourserver.com/webhook/telegram
|
||||||
@@ -65,7 +75,7 @@ TELEGRAM_CHAT_ID=your_chat_id
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Build Docker image
|
# Build Docker image
|
||||||
docker build -t ai-software-factory -f Containerfile .
|
DOCKER_API_VERSION=1.43 docker build -t ai-software-factory -f Containerfile .
|
||||||
|
|
||||||
# Run with Docker Compose
|
# Run with Docker Compose
|
||||||
docker-compose up -d
|
docker-compose up -d
|
||||||
@@ -76,26 +86,33 @@ docker-compose up -d
|
|||||||
1. **Send a request via Telegram:**
|
1. **Send a request via Telegram:**
|
||||||
|
|
||||||
```
|
```
|
||||||
Name: My Awesome App
|
Build an internal task management app for our operations team.
|
||||||
Description: A web application for managing tasks
|
It should support user authentication, task CRUD, notifications, and reporting.
|
||||||
Features: user authentication, task CRUD, notifications
|
Prefer FastAPI with PostgreSQL and a simple web dashboard.
|
||||||
```
|
```
|
||||||
|
|
||||||
|
The backend now interprets free-form Telegram text with Ollama before generation.
|
||||||
|
If `TELEGRAM_CHAT_ID` is set, the Telegram-trigger workflow only reacts to messages from that specific chat.
|
||||||
|
|
||||||
2. **Monitor progress via Web UI:**
|
2. **Monitor progress via Web UI:**
|
||||||
|
|
||||||
Open `http://yourserver:8000` to see real-time progress
|
Open `http://yourserver:8000/` to see the dashboard and `http://yourserver:8000/api` for API metadata
|
||||||
|
|
||||||
3. **Review PRs in Gitea:**
|
3. **Review PRs in Gitea:**
|
||||||
|
|
||||||
Check your gitea repository for generated PRs
|
Check your gitea repository for generated PRs
|
||||||
|
|
||||||
|
If you deploy the container with PostgreSQL environment variables set, the service now selects PostgreSQL automatically even though SQLite remains the default for local/test usage.
|
||||||
|
|
||||||
## API Endpoints
|
## API Endpoints
|
||||||
|
|
||||||
| Endpoint | Method | Description |
|
| Endpoint | Method | Description |
|
||||||
|------|------|-------|
|
|------|------|-------|
|
||||||
| `/` | GET | API information |
|
| `/` | GET | Dashboard |
|
||||||
|
| `/api` | GET | API information |
|
||||||
| `/health` | GET | Health check |
|
| `/health` | GET | Health check |
|
||||||
| `/generate` | POST | Generate new software |
|
| `/generate` | POST | Generate new software |
|
||||||
|
| `/generate/text` | POST | Interpret free-form text and generate software |
|
||||||
| `/status/{project_id}` | GET | Get project status |
|
| `/status/{project_id}` | GET | Get project status |
|
||||||
| `/projects` | GET | List all projects |
|
| `/projects` | GET | List all projects |
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ OLLAMA_MODEL=llama3
|
|||||||
GITEA_URL=https://gitea.yourserver.com
|
GITEA_URL=https://gitea.yourserver.com
|
||||||
GITEA_TOKEN=your_gitea_api_token
|
GITEA_TOKEN=your_gitea_api_token
|
||||||
GITEA_OWNER=your_organization_name
|
GITEA_OWNER=your_organization_name
|
||||||
GITEA_REPO= (optional - leave empty for any repo, or specify a default)
|
GITEA_REPO= (optional legacy fixed repository mode; leave empty to create one repo per project)
|
||||||
|
|
||||||
# n8n
|
# n8n
|
||||||
# n8n webhook for Telegram integration
|
# n8n webhook for Telegram integration
|
||||||
@@ -30,6 +30,8 @@ TELEGRAM_BOT_TOKEN=your_telegram_bot_token
|
|||||||
TELEGRAM_CHAT_ID=your_chat_id
|
TELEGRAM_CHAT_ID=your_chat_id
|
||||||
|
|
||||||
# PostgreSQL
|
# PostgreSQL
|
||||||
|
# In production, provide PostgreSQL settings below. They now take precedence over the SQLite default.
|
||||||
|
# You can also set USE_SQLITE=false explicitly if you want the intent to be obvious.
|
||||||
POSTGRES_HOST=postgres
|
POSTGRES_HOST=postgres
|
||||||
POSTGRES_PORT=5432
|
POSTGRES_PORT=5432
|
||||||
POSTGRES_USER=ai_test
|
POSTGRES_USER=ai_test
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
0.3.4
|
0.7.1
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
"""AI Software Factory agents."""
|
"""AI Software Factory agents."""
|
||||||
|
|
||||||
from agents.orchestrator import AgentOrchestrator
|
from .orchestrator import AgentOrchestrator
|
||||||
from agents.git_manager import GitManager
|
from .git_manager import GitManager
|
||||||
from agents.ui_manager import UIManager
|
from .ui_manager import UIManager
|
||||||
from agents.telegram import TelegramHandler
|
from .telegram import TelegramHandler
|
||||||
from agents.gitea import GiteaAPI
|
from .gitea import GiteaAPI
|
||||||
from agents.database_manager import DatabaseManager
|
from .database_manager import DatabaseManager
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"AgentOrchestrator",
|
"AgentOrchestrator",
|
||||||
|
|||||||
136
ai_software_factory/agents/change_summary.py
Normal file
136
ai_software_factory/agents/change_summary.py
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
"""Generate concise chat-friendly summaries of software generation results."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ..config import settings
|
||||||
|
except ImportError:
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
|
||||||
|
class ChangeSummaryGenerator:
|
||||||
|
"""Create a readable overview of generated changes for chat responses."""
|
||||||
|
|
||||||
|
def __init__(self, ollama_url: str | None = None, model: str | None = None):
|
||||||
|
self.ollama_url = (ollama_url or settings.ollama_url).rstrip('/')
|
||||||
|
self.model = model or settings.OLLAMA_MODEL
|
||||||
|
|
||||||
|
async def summarize(self, context: dict) -> str:
|
||||||
|
"""Summarize project changes with Ollama, or fall back to a deterministic overview."""
|
||||||
|
summary, _trace = await self.summarize_with_trace(context)
|
||||||
|
return summary
|
||||||
|
|
||||||
|
async def summarize_with_trace(self, context: dict) -> tuple[str, dict]:
|
||||||
|
"""Summarize project changes with Ollama, or fall back to a deterministic overview."""
|
||||||
|
prompt = self._prompt(context)
|
||||||
|
system_prompt = (
|
||||||
|
'You write concise but informative mobile chat summaries of software delivery work. '
|
||||||
|
'Write 3 to 5 sentences. Mention the application goal, main delivered pieces, '
|
||||||
|
'technical direction, and what the user should expect next. Avoid markdown bullets.'
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
async with session.post(
|
||||||
|
f'{self.ollama_url}/api/chat',
|
||||||
|
json={
|
||||||
|
'model': self.model,
|
||||||
|
'stream': False,
|
||||||
|
'messages': [
|
||||||
|
{
|
||||||
|
'role': 'system',
|
||||||
|
'content': system_prompt,
|
||||||
|
},
|
||||||
|
{'role': 'user', 'content': prompt},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
) as resp:
|
||||||
|
payload = await resp.json()
|
||||||
|
if 200 <= resp.status < 300:
|
||||||
|
content = payload.get('message', {}).get('content', '').strip()
|
||||||
|
if content:
|
||||||
|
return content, {
|
||||||
|
'stage': 'change_summary',
|
||||||
|
'provider': 'ollama',
|
||||||
|
'model': self.model,
|
||||||
|
'system_prompt': system_prompt,
|
||||||
|
'user_prompt': prompt,
|
||||||
|
'assistant_response': content,
|
||||||
|
'raw_response': payload,
|
||||||
|
'fallback_used': False,
|
||||||
|
}
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
fallback = self._fallback(context)
|
||||||
|
return fallback, {
|
||||||
|
'stage': 'change_summary',
|
||||||
|
'provider': 'fallback',
|
||||||
|
'model': self.model,
|
||||||
|
'system_prompt': system_prompt,
|
||||||
|
'user_prompt': prompt,
|
||||||
|
'assistant_response': fallback,
|
||||||
|
'raw_response': {'fallback': 'deterministic'},
|
||||||
|
'fallback_used': True,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _prompt(self, context: dict) -> str:
|
||||||
|
features = ', '.join(context.get('features') or []) or 'No explicit features recorded'
|
||||||
|
tech_stack = ', '.join(context.get('tech_stack') or []) or 'No explicit tech stack recorded'
|
||||||
|
changed_files = ', '.join(context.get('changed_files') or []) or 'No files recorded'
|
||||||
|
logs = ' | '.join((context.get('logs') or [])[:4]) or 'No log excerpts'
|
||||||
|
return (
|
||||||
|
f"Project name: {context.get('name', 'Unknown project')}\n"
|
||||||
|
f"Description: {context.get('description', '')}\n"
|
||||||
|
f"Features: {features}\n"
|
||||||
|
f"Tech stack: {tech_stack}\n"
|
||||||
|
f"Changed files: {changed_files}\n"
|
||||||
|
f"Repository: {context.get('repository_url') or 'No repository URL'}\n"
|
||||||
|
f"Pull request: {context.get('pull_request_url') or 'No pull request URL'}\n"
|
||||||
|
f"Pull request state: {context.get('pull_request_state') or 'No pull request state'}\n"
|
||||||
|
f"Status message: {context.get('message') or ''}\n"
|
||||||
|
f"Log excerpts: {logs}\n"
|
||||||
|
"Write a broad but phone-friendly summary of what was done."
|
||||||
|
)
|
||||||
|
|
||||||
|
def _fallback(self, context: dict) -> str:
|
||||||
|
name = context.get('name', 'The project')
|
||||||
|
description = context.get('description') or 'a software request'
|
||||||
|
changed_files = context.get('changed_files') or []
|
||||||
|
features = context.get('features') or []
|
||||||
|
tech_stack = context.get('tech_stack') or []
|
||||||
|
repo_url = context.get('repository_url')
|
||||||
|
repo_status = context.get('repository_status')
|
||||||
|
pr_url = context.get('pull_request_url')
|
||||||
|
pr_state = context.get('pull_request_state')
|
||||||
|
|
||||||
|
first_sentence = f"{name} was generated from your request for {description}."
|
||||||
|
feature_sentence = (
|
||||||
|
f"The delivery focused on {', '.join(features[:3])}."
|
||||||
|
if features else
|
||||||
|
"The delivery focused on turning the request into an initial runnable application skeleton."
|
||||||
|
)
|
||||||
|
tech_sentence = (
|
||||||
|
f"The generated implementation currently targets {', '.join(tech_stack[:3])}."
|
||||||
|
if tech_stack else
|
||||||
|
"The implementation was created with the current default stack configured for the factory."
|
||||||
|
)
|
||||||
|
file_sentence = (
|
||||||
|
f"Key artifacts were updated across {len(changed_files)} files, including {', '.join(changed_files[:3])}."
|
||||||
|
if changed_files else
|
||||||
|
"The service completed the generation flow, but no changed file list was returned."
|
||||||
|
)
|
||||||
|
if repo_url:
|
||||||
|
repo_sentence = f"The resulting project is tracked at {repo_url}."
|
||||||
|
elif repo_status in {'pending', 'skipped', 'error'}:
|
||||||
|
repo_sentence = "Repository provisioning was not confirmed, so review the Gitea status in the dashboard before assuming a remote repo exists."
|
||||||
|
else:
|
||||||
|
repo_sentence = "The project is ready for further review in the dashboard."
|
||||||
|
if pr_url and pr_state == 'open':
|
||||||
|
pr_sentence = f"An open pull request is ready for review at {pr_url}, and later prompts will continue updating that same PR until it is merged."
|
||||||
|
elif pr_url:
|
||||||
|
pr_sentence = f"The latest pull request is available at {pr_url}."
|
||||||
|
else:
|
||||||
|
pr_sentence = "No pull request link was recorded for this delivery."
|
||||||
|
return ' '.join([first_sentence, feature_sentence, tech_sentence, file_sentence, repo_sentence, pr_sentence])
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,52 +1,155 @@
|
|||||||
"""Git manager for project operations."""
|
"""Git manager for project operations."""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ..config import settings
|
||||||
|
except ImportError:
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
|
||||||
class GitManager:
|
class GitManager:
|
||||||
"""Manages git operations for the project."""
|
"""Manages git operations for the project."""
|
||||||
|
|
||||||
def __init__(self, project_id: str):
|
def __init__(self, project_id: str, project_dir: str | None = None):
|
||||||
if not project_id:
|
if not project_id:
|
||||||
raise ValueError("project_id cannot be empty or None")
|
raise ValueError("project_id cannot be empty or None")
|
||||||
self.project_id = project_id
|
self.project_id = project_id
|
||||||
self.project_dir = f"{os.path.dirname(__file__)}/../../test-project/{project_id}"
|
if project_dir:
|
||||||
|
resolved = Path(project_dir).expanduser().resolve()
|
||||||
|
else:
|
||||||
|
project_path = Path(project_id)
|
||||||
|
if project_path.is_absolute() or len(project_path.parts) > 1:
|
||||||
|
resolved = project_path.expanduser().resolve()
|
||||||
|
else:
|
||||||
|
base_root = settings.projects_root
|
||||||
|
if base_root.name != "test-project":
|
||||||
|
base_root = base_root / "test-project"
|
||||||
|
resolved = (base_root / project_id).resolve()
|
||||||
|
self.project_dir = str(resolved)
|
||||||
|
|
||||||
|
def is_git_available(self) -> bool:
|
||||||
|
"""Return whether the git executable is available in the current environment."""
|
||||||
|
return shutil.which('git') is not None
|
||||||
|
|
||||||
|
def _ensure_git_available(self) -> None:
|
||||||
|
"""Raise a clear error when git is not installed in the runtime environment."""
|
||||||
|
if not self.is_git_available():
|
||||||
|
raise RuntimeError('git executable is not available in PATH')
|
||||||
|
|
||||||
|
def _run(self, args: list[str], env: dict | None = None, check: bool = True) -> subprocess.CompletedProcess:
|
||||||
|
"""Run a git command in the project directory."""
|
||||||
|
self._ensure_git_available()
|
||||||
|
return subprocess.run(
|
||||||
|
args,
|
||||||
|
check=check,
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
cwd=self.project_dir,
|
||||||
|
env=env,
|
||||||
|
)
|
||||||
|
|
||||||
|
def has_repo(self) -> bool:
|
||||||
|
"""Return whether the project directory already contains a git repository."""
|
||||||
|
return Path(self.project_dir, '.git').exists()
|
||||||
|
|
||||||
def init_repo(self):
|
def init_repo(self):
|
||||||
"""Initialize git repository."""
|
"""Initialize git repository."""
|
||||||
os.makedirs(self.project_dir, exist_ok=True)
|
os.makedirs(self.project_dir, exist_ok=True)
|
||||||
os.chdir(self.project_dir)
|
self._run(["git", "init", "-b", "main"])
|
||||||
subprocess.run(["git", "init"], check=True, capture_output=True)
|
self._run(["git", "config", "user.name", "AI Software Factory"])
|
||||||
|
self._run(["git", "config", "user.email", "factory@local.invalid"])
|
||||||
|
|
||||||
def add_files(self, paths: list[str]):
|
def add_files(self, paths: list[str]):
|
||||||
"""Add files to git staging."""
|
"""Add files to git staging."""
|
||||||
subprocess.run(["git", "add"] + paths, check=True, capture_output=True)
|
self._run(["git", "add"] + paths)
|
||||||
|
|
||||||
def commit(self, message: str):
|
def checkout_branch(self, branch_name: str, create: bool = False, start_point: str | None = None) -> None:
|
||||||
|
"""Switch to a branch, optionally creating it from a start point."""
|
||||||
|
if create:
|
||||||
|
args = ["git", "checkout", "-B", branch_name]
|
||||||
|
if start_point:
|
||||||
|
args.append(start_point)
|
||||||
|
self._run(args)
|
||||||
|
return
|
||||||
|
self._run(["git", "checkout", branch_name])
|
||||||
|
|
||||||
|
def branch_exists(self, branch_name: str) -> bool:
|
||||||
|
"""Return whether a local branch exists."""
|
||||||
|
result = self._run(["git", "show-ref", "--verify", f"refs/heads/{branch_name}"], check=False)
|
||||||
|
return result.returncode == 0
|
||||||
|
|
||||||
|
def commit(self, message: str) -> str:
|
||||||
"""Create a git commit."""
|
"""Create a git commit."""
|
||||||
subprocess.run(
|
self._run(["git", "commit", "-m", message])
|
||||||
["git", "commit", "-m", message],
|
return self.current_head()
|
||||||
check=True,
|
|
||||||
capture_output=True
|
def create_empty_commit(self, message: str) -> str:
|
||||||
)
|
"""Create an empty commit."""
|
||||||
|
self._run(["git", "commit", "--allow-empty", "-m", message])
|
||||||
|
return self.current_head()
|
||||||
|
|
||||||
def push(self, remote: str = "origin", branch: str = "main"):
|
def push(self, remote: str = "origin", branch: str = "main"):
|
||||||
"""Push changes to remote."""
|
"""Push changes to remote."""
|
||||||
subprocess.run(
|
self._run(["git", "push", "-u", remote, branch])
|
||||||
["git", "push", "-u", remote, branch],
|
|
||||||
check=True,
|
def ensure_remote(self, remote: str, url: str) -> None:
|
||||||
capture_output=True
|
"""Create or update a remote URL."""
|
||||||
)
|
result = self._run(["git", "remote", "get-url", remote], check=False)
|
||||||
|
if result.returncode == 0:
|
||||||
|
self._run(["git", "remote", "set-url", remote, url])
|
||||||
|
else:
|
||||||
|
self._run(["git", "remote", "add", remote, url])
|
||||||
|
|
||||||
|
def push_with_credentials(
|
||||||
|
self,
|
||||||
|
remote_url: str,
|
||||||
|
username: str,
|
||||||
|
password: str,
|
||||||
|
remote: str = "origin",
|
||||||
|
branch: str = "main",
|
||||||
|
) -> None:
|
||||||
|
"""Push to a remote over HTTPS using an askpass helper."""
|
||||||
|
os.makedirs(self.project_dir, exist_ok=True)
|
||||||
|
self.ensure_remote(remote, remote_url)
|
||||||
|
helper_contents = "#!/bin/sh\ncase \"$1\" in\n *Username*) printf '%s\\n' \"$GIT_ASKPASS_USERNAME\" ;;\n *) printf '%s\\n' \"$GIT_ASKPASS_PASSWORD\" ;;\nesac\n"
|
||||||
|
helper_path: str | None = None
|
||||||
|
try:
|
||||||
|
with tempfile.NamedTemporaryFile('w', delete=False, dir=self.project_dir, prefix='git-askpass-', suffix='.sh') as helper_file:
|
||||||
|
helper_file.write(helper_contents)
|
||||||
|
helper_path = helper_file.name
|
||||||
|
os.chmod(helper_path, 0o700)
|
||||||
|
env = os.environ.copy()
|
||||||
|
env.update(
|
||||||
|
{
|
||||||
|
"GIT_TERMINAL_PROMPT": "0",
|
||||||
|
"GIT_ASKPASS": helper_path,
|
||||||
|
"GIT_ASKPASS_USERNAME": username,
|
||||||
|
"GIT_ASKPASS_PASSWORD": password,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
self._run(["git", "push", "-u", remote, branch], env=env)
|
||||||
|
finally:
|
||||||
|
if helper_path:
|
||||||
|
Path(helper_path).unlink(missing_ok=True)
|
||||||
|
|
||||||
def create_branch(self, branch_name: str):
|
def create_branch(self, branch_name: str):
|
||||||
"""Create and switch to a new branch."""
|
"""Create and switch to a new branch."""
|
||||||
subprocess.run(
|
self._run(["git", "checkout", "-b", branch_name])
|
||||||
["git", "checkout", "-b", branch_name],
|
|
||||||
check=True,
|
def revert_commit(self, commit_hash: str, no_edit: bool = True) -> str:
|
||||||
capture_output=True
|
"""Revert a commit and return the new HEAD."""
|
||||||
)
|
args = ["git", "revert"]
|
||||||
|
if no_edit:
|
||||||
|
args.append("--no-edit")
|
||||||
|
args.append(commit_hash)
|
||||||
|
self._run(args)
|
||||||
|
return self.current_head()
|
||||||
|
|
||||||
def create_pr(
|
def create_pr(
|
||||||
self,
|
self,
|
||||||
@@ -70,6 +173,18 @@ class GitManager:
|
|||||||
result = subprocess.run(
|
result = subprocess.run(
|
||||||
["git", "status", "--porcelain"],
|
["git", "status", "--porcelain"],
|
||||||
capture_output=True,
|
capture_output=True,
|
||||||
text=True
|
text=True,
|
||||||
|
cwd=self.project_dir,
|
||||||
)
|
)
|
||||||
return result.stdout.strip()
|
return result.stdout.strip()
|
||||||
|
|
||||||
|
def current_head(self) -> str:
|
||||||
|
"""Return the current commit hash."""
|
||||||
|
return self._run(["git", "rev-parse", "HEAD"]).stdout.strip()
|
||||||
|
|
||||||
|
def current_head_or_none(self) -> str | None:
|
||||||
|
"""Return the current commit hash when the repository already has commits."""
|
||||||
|
result = self._run(["git", "rev-parse", "HEAD"], check=False)
|
||||||
|
if result.returncode != 0:
|
||||||
|
return None
|
||||||
|
return result.stdout.strip() or None
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
"""Gitea API integration for commits and PRs."""
|
"""Gitea API integration for repository and pull request operations."""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from typing import Optional
|
import urllib.error
|
||||||
|
import urllib.request
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
class GiteaAPI:
|
class GiteaAPI:
|
||||||
@@ -14,7 +16,7 @@ class GiteaAPI:
|
|||||||
self.repo = repo
|
self.repo = repo
|
||||||
self.headers = {
|
self.headers = {
|
||||||
"Authorization": f"token {token}",
|
"Authorization": f"token {token}",
|
||||||
"Content-Type": "application/json"
|
"Content-Type": "application/json",
|
||||||
}
|
}
|
||||||
|
|
||||||
def get_config(self) -> dict:
|
def get_config(self) -> dict:
|
||||||
@@ -23,60 +25,173 @@ class GiteaAPI:
|
|||||||
token = os.getenv("GITEA_TOKEN", "")
|
token = os.getenv("GITEA_TOKEN", "")
|
||||||
owner = os.getenv("GITEA_OWNER", "ai-test")
|
owner = os.getenv("GITEA_OWNER", "ai-test")
|
||||||
repo = os.getenv("GITEA_REPO", "")
|
repo = os.getenv("GITEA_REPO", "")
|
||||||
|
|
||||||
# Allow empty repo for any repo mode (org/repo pattern)
|
|
||||||
if not repo:
|
|
||||||
repo = "any-repo" # Use this as a placeholder for org/repo operations
|
|
||||||
|
|
||||||
# Check for repo suffix pattern (e.g., repo-* for multiple repos)
|
|
||||||
repo_suffix = os.getenv("GITEA_REPO_SUFFIX", "")
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"base_url": base_url.rstrip("/"),
|
"base_url": base_url.rstrip("/"),
|
||||||
"token": token,
|
"token": token,
|
||||||
"owner": owner,
|
"owner": owner,
|
||||||
"repo": repo,
|
"repo": repo,
|
||||||
"repo_suffix": repo_suffix,
|
"supports_project_repos": not bool(repo),
|
||||||
"supports_any_repo": not repo or repo_suffix
|
|
||||||
}
|
}
|
||||||
|
|
||||||
def get_auth_headers(self) -> dict:
|
def get_auth_headers(self) -> dict:
|
||||||
"""Get authentication headers."""
|
"""Get authentication headers."""
|
||||||
return {
|
return {
|
||||||
"Authorization": f"token {self.token}",
|
"Authorization": f"token {self.token}",
|
||||||
"Content-Type": "application/json"
|
"Content-Type": "application/json",
|
||||||
}
|
}
|
||||||
|
|
||||||
async def create_branch(self, branch: str, base: str = "main", owner: str | None = None, repo: str | None = None):
|
def _api_url(self, path: str) -> str:
|
||||||
"""Create a new branch.
|
"""Build a Gitea API URL from a relative path."""
|
||||||
|
return f"{self.base_url}/api/v1/{path.lstrip('/')}"
|
||||||
|
|
||||||
Args:
|
def build_repo_git_url(self, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||||
branch: Branch name to create
|
"""Build the clone URL for a repository."""
|
||||||
base: Base branch to create from (default: "main")
|
|
||||||
owner: Organization/owner name (optional, falls back to configured owner)
|
|
||||||
repo: Repository name (optional, falls back to configured repo)
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
API response or error message
|
|
||||||
"""
|
|
||||||
# Use provided owner/repo or fall back to configured values
|
|
||||||
_owner = owner or self.owner
|
_owner = owner or self.owner
|
||||||
_repo = repo or self.repo
|
_repo = repo or self.repo
|
||||||
|
if not _owner or not _repo:
|
||||||
|
return None
|
||||||
|
return f"{self.base_url}/{_owner}/{_repo}.git"
|
||||||
|
|
||||||
url = f"{self.base_url}/repos/{_owner}/{_repo}/branches/{branch}"
|
def build_commit_url(self, commit_hash: str, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||||
payload = {"base": base}
|
"""Build a browser URL for a commit."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
if not _owner or not _repo or not commit_hash:
|
||||||
|
return None
|
||||||
|
return f"{self.base_url}/{_owner}/{_repo}/commit/{commit_hash}"
|
||||||
|
|
||||||
|
def build_compare_url(self, base_ref: str, head_ref: str, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||||
|
"""Build a browser URL for a compare view."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
if not _owner or not _repo or not base_ref or not head_ref:
|
||||||
|
return None
|
||||||
|
return f"{self.base_url}/{_owner}/{_repo}/compare/{base_ref}...{head_ref}"
|
||||||
|
|
||||||
|
def build_pull_request_url(self, pr_number: int, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||||
|
"""Build a browser URL for a pull request."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
if not _owner or not _repo or not pr_number:
|
||||||
|
return None
|
||||||
|
return f"{self.base_url}/{_owner}/{_repo}/pulls/{pr_number}"
|
||||||
|
|
||||||
|
async def _request(self, method: str, path: str, payload: dict | None = None) -> dict:
|
||||||
|
"""Perform a Gitea API request and normalize the response."""
|
||||||
try:
|
try:
|
||||||
import aiohttp
|
import aiohttp
|
||||||
|
|
||||||
async with aiohttp.ClientSession() as session:
|
async with aiohttp.ClientSession() as session:
|
||||||
async with session.post(url, headers=self.get_auth_headers(), json=payload) as resp:
|
async with session.request(
|
||||||
if resp.status == 201:
|
method,
|
||||||
|
self._api_url(path),
|
||||||
|
headers=self.get_auth_headers(),
|
||||||
|
json=payload,
|
||||||
|
) as resp:
|
||||||
|
if resp.status in (200, 201):
|
||||||
return await resp.json()
|
return await resp.json()
|
||||||
else:
|
return {"error": await resp.text(), "status_code": resp.status}
|
||||||
return {"error": await resp.text()}
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {"error": str(e)}
|
return {"error": str(e)}
|
||||||
|
|
||||||
|
def _request_sync(self, method: str, path: str, payload: dict | None = None) -> dict:
|
||||||
|
"""Perform a synchronous Gitea API request."""
|
||||||
|
request = urllib.request.Request(
|
||||||
|
self._api_url(path),
|
||||||
|
headers=self.get_auth_headers(),
|
||||||
|
method=method.upper(),
|
||||||
|
)
|
||||||
|
data = None
|
||||||
|
if payload is not None:
|
||||||
|
data = json.dumps(payload).encode('utf-8')
|
||||||
|
request.data = data
|
||||||
|
try:
|
||||||
|
with urllib.request.urlopen(request) as response:
|
||||||
|
body = response.read().decode('utf-8')
|
||||||
|
return json.loads(body) if body else {}
|
||||||
|
except urllib.error.HTTPError as exc:
|
||||||
|
try:
|
||||||
|
body = exc.read().decode('utf-8')
|
||||||
|
except Exception:
|
||||||
|
body = str(exc)
|
||||||
|
return {'error': body, 'status_code': exc.code}
|
||||||
|
except Exception as exc:
|
||||||
|
return {'error': str(exc)}
|
||||||
|
|
||||||
|
def build_project_repo_name(self, project_id: str, project_name: str | None = None) -> str:
|
||||||
|
"""Build a repository name for a generated project."""
|
||||||
|
preferred = (project_name or project_id or "project").strip().lower().replace(" ", "-")
|
||||||
|
sanitized = "".join(ch if ch.isalnum() or ch in {"-", "_"} else "-" for ch in preferred)
|
||||||
|
while "--" in sanitized:
|
||||||
|
sanitized = sanitized.replace("--", "-")
|
||||||
|
return sanitized.strip("-") or project_id
|
||||||
|
|
||||||
|
async def create_repo(
|
||||||
|
self,
|
||||||
|
repo_name: str,
|
||||||
|
owner: str | None = None,
|
||||||
|
description: str | None = None,
|
||||||
|
private: bool = False,
|
||||||
|
auto_init: bool = True,
|
||||||
|
) -> dict:
|
||||||
|
"""Create a repository inside the configured organization."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
if not _owner:
|
||||||
|
return {"error": "Owner or organization is required"}
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
"name": repo_name,
|
||||||
|
"description": description or f"AI-generated project repository for {repo_name}",
|
||||||
|
"private": private,
|
||||||
|
"auto_init": auto_init,
|
||||||
|
"default_branch": "main",
|
||||||
|
}
|
||||||
|
result = await self._request("POST", f"orgs/{_owner}/repos", payload)
|
||||||
|
if result.get("status_code") == 409:
|
||||||
|
existing = await self.get_repo_info(owner=_owner, repo=repo_name)
|
||||||
|
if not existing.get("error"):
|
||||||
|
existing["status"] = "exists"
|
||||||
|
return existing
|
||||||
|
if not result.get("error"):
|
||||||
|
result.setdefault("status", "created")
|
||||||
|
return result
|
||||||
|
|
||||||
|
async def delete_repo(self, owner: str | None = None, repo: str | None = None) -> dict:
|
||||||
|
"""Delete a repository from the configured organization/user."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
if not _owner or not _repo:
|
||||||
|
return {'error': 'Owner and repository name are required'}
|
||||||
|
result = await self._request('DELETE', f'repos/{_owner}/{_repo}')
|
||||||
|
if not result.get('error'):
|
||||||
|
result.setdefault('status', 'deleted')
|
||||||
|
return result
|
||||||
|
|
||||||
|
def delete_repo_sync(self, owner: str | None = None, repo: str | None = None) -> dict:
|
||||||
|
"""Synchronously delete a repository from the configured organization/user."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
if not _owner or not _repo:
|
||||||
|
return {'error': 'Owner and repository name are required'}
|
||||||
|
result = self._request_sync('DELETE', f'repos/{_owner}/{_repo}')
|
||||||
|
if not result.get('error'):
|
||||||
|
result.setdefault('status', 'deleted')
|
||||||
|
return result
|
||||||
|
|
||||||
|
async def get_current_user(self) -> dict:
|
||||||
|
"""Get the user associated with the configured token."""
|
||||||
|
return await self._request("GET", "user")
|
||||||
|
|
||||||
|
async def create_branch(self, branch: str, base: str = "main", owner: str | None = None, repo: str | None = None):
|
||||||
|
"""Create a new branch."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return await self._request(
|
||||||
|
"POST",
|
||||||
|
f"repos/{_owner}/{_repo}/branches",
|
||||||
|
{"new_branch_name": branch, "old_ref_name": base},
|
||||||
|
)
|
||||||
|
|
||||||
async def create_pull_request(
|
async def create_pull_request(
|
||||||
self,
|
self,
|
||||||
title: str,
|
title: str,
|
||||||
@@ -84,43 +199,156 @@ class GiteaAPI:
|
|||||||
owner: str,
|
owner: str,
|
||||||
repo: str,
|
repo: str,
|
||||||
base: str = "main",
|
base: str = "main",
|
||||||
head: str | None = None
|
head: str | None = None,
|
||||||
) -> dict:
|
) -> dict:
|
||||||
"""Create a pull request.
|
"""Create a pull request."""
|
||||||
|
|
||||||
Args:
|
|
||||||
title: PR title
|
|
||||||
body: PR description
|
|
||||||
owner: Organization/owner name
|
|
||||||
repo: Repository name
|
|
||||||
base: Base branch (default: "main")
|
|
||||||
head: Head branch (optional, auto-generated if not provided)
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
API response or error message
|
|
||||||
"""
|
|
||||||
_owner = owner or self.owner
|
_owner = owner or self.owner
|
||||||
_repo = repo or self.repo
|
_repo = repo or self.repo
|
||||||
|
|
||||||
url = f"{self.base_url}/repos/{_owner}/{_repo}/pulls"
|
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"title": title,
|
"title": title,
|
||||||
"body": body,
|
"body": body,
|
||||||
"base": {"branch": base},
|
"base": base,
|
||||||
"head": head or f"{_owner}-{_repo}-ai-gen-{hash(title) % 10000}"
|
"head": head or f"{_owner}-{_repo}-ai-gen-{hash(title) % 10000}",
|
||||||
}
|
}
|
||||||
|
return await self._request("POST", f"repos/{_owner}/{_repo}/pulls", payload)
|
||||||
|
|
||||||
try:
|
async def list_pull_requests(
|
||||||
import aiohttp
|
self,
|
||||||
async with aiohttp.ClientSession() as session:
|
owner: str | None = None,
|
||||||
async with session.post(url, headers=self.get_auth_headers(), json=payload) as resp:
|
repo: str | None = None,
|
||||||
if resp.status == 201:
|
state: str = 'open',
|
||||||
return await resp.json()
|
) -> dict | list:
|
||||||
else:
|
"""List pull requests for a repository."""
|
||||||
return {"error": await resp.text()}
|
_owner = owner or self.owner
|
||||||
except Exception as e:
|
_repo = repo or self.repo
|
||||||
return {"error": str(e)}
|
return await self._request("GET", f"repos/{_owner}/{_repo}/pulls?state={state}")
|
||||||
|
|
||||||
|
def list_pull_requests_sync(
|
||||||
|
self,
|
||||||
|
owner: str | None = None,
|
||||||
|
repo: str | None = None,
|
||||||
|
state: str = 'open',
|
||||||
|
) -> dict | list:
|
||||||
|
"""Synchronously list pull requests for a repository."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return self._request_sync("GET", f"repos/{_owner}/{_repo}/pulls?state={state}")
|
||||||
|
|
||||||
|
async def list_repositories(self, owner: str | None = None) -> dict | list:
|
||||||
|
"""List repositories within the configured organization."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
return await self._request("GET", f"orgs/{_owner}/repos")
|
||||||
|
|
||||||
|
def list_repositories_sync(self, owner: str | None = None) -> dict | list:
|
||||||
|
"""Synchronously list repositories within the configured organization."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
return self._request_sync("GET", f"orgs/{_owner}/repos")
|
||||||
|
|
||||||
|
async def list_branches(self, owner: str | None = None, repo: str | None = None) -> dict | list:
|
||||||
|
"""List repository branches."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return await self._request("GET", f"repos/{_owner}/{_repo}/branches")
|
||||||
|
|
||||||
|
def list_branches_sync(self, owner: str | None = None, repo: str | None = None) -> dict | list:
|
||||||
|
"""Synchronously list repository branches."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return self._request_sync("GET", f"repos/{_owner}/{_repo}/branches")
|
||||||
|
|
||||||
|
async def list_issues(
|
||||||
|
self,
|
||||||
|
owner: str | None = None,
|
||||||
|
repo: str | None = None,
|
||||||
|
state: str = 'open',
|
||||||
|
) -> dict | list:
|
||||||
|
"""List repository issues, excluding pull requests at the consumer layer."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return await self._request("GET", f"repos/{_owner}/{_repo}/issues?state={state}")
|
||||||
|
|
||||||
|
def list_issues_sync(
|
||||||
|
self,
|
||||||
|
owner: str | None = None,
|
||||||
|
repo: str | None = None,
|
||||||
|
state: str = 'open',
|
||||||
|
) -> dict | list:
|
||||||
|
"""Synchronously list repository issues."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return self._request_sync("GET", f"repos/{_owner}/{_repo}/issues?state={state}")
|
||||||
|
|
||||||
|
async def get_issue(self, issue_number: int, owner: str | None = None, repo: str | None = None) -> dict:
|
||||||
|
"""Return one repository issue by number."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return await self._request("GET", f"repos/{_owner}/{_repo}/issues/{issue_number}")
|
||||||
|
|
||||||
|
def get_issue_sync(self, issue_number: int, owner: str | None = None, repo: str | None = None) -> dict:
|
||||||
|
"""Synchronously return one repository issue by number."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return self._request_sync("GET", f"repos/{_owner}/{_repo}/issues/{issue_number}")
|
||||||
|
|
||||||
|
async def list_repo_commits(
|
||||||
|
self,
|
||||||
|
owner: str | None = None,
|
||||||
|
repo: str | None = None,
|
||||||
|
limit: int = 25,
|
||||||
|
branch: str | None = None,
|
||||||
|
) -> dict | list:
|
||||||
|
"""List recent commits for a repository."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
branch_query = f"&sha={branch}" if branch else ""
|
||||||
|
return await self._request("GET", f"repos/{_owner}/{_repo}/commits?limit={limit}{branch_query}")
|
||||||
|
|
||||||
|
def list_repo_commits_sync(
|
||||||
|
self,
|
||||||
|
owner: str | None = None,
|
||||||
|
repo: str | None = None,
|
||||||
|
limit: int = 25,
|
||||||
|
branch: str | None = None,
|
||||||
|
) -> dict | list:
|
||||||
|
"""Synchronously list recent commits for a repository."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
branch_query = f"&sha={branch}" if branch else ""
|
||||||
|
return self._request_sync("GET", f"repos/{_owner}/{_repo}/commits?limit={limit}{branch_query}")
|
||||||
|
|
||||||
|
async def get_commit(
|
||||||
|
self,
|
||||||
|
commit_hash: str,
|
||||||
|
owner: str | None = None,
|
||||||
|
repo: str | None = None,
|
||||||
|
) -> dict:
|
||||||
|
"""Return one commit by hash."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return await self._request("GET", f"repos/{_owner}/{_repo}/git/commits/{commit_hash}")
|
||||||
|
|
||||||
|
def get_commit_sync(
|
||||||
|
self,
|
||||||
|
commit_hash: str,
|
||||||
|
owner: str | None = None,
|
||||||
|
repo: str | None = None,
|
||||||
|
) -> dict:
|
||||||
|
"""Synchronously return one commit by hash."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return self._request_sync("GET", f"repos/{_owner}/{_repo}/git/commits/{commit_hash}")
|
||||||
|
|
||||||
|
async def get_pull_request(self, pr_number: int, owner: str | None = None, repo: str | None = None) -> dict:
|
||||||
|
"""Return one pull request by number."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return await self._request("GET", f"repos/{_owner}/{_repo}/pulls/{pr_number}")
|
||||||
|
|
||||||
|
def get_pull_request_sync(self, pr_number: int, owner: str | None = None, repo: str | None = None) -> dict:
|
||||||
|
"""Synchronously return one pull request by number."""
|
||||||
|
_owner = owner or self.owner
|
||||||
|
_repo = repo or self.repo
|
||||||
|
return self._request_sync("GET", f"repos/{_owner}/{_repo}/pulls/{pr_number}")
|
||||||
|
|
||||||
async def push_commit(
|
async def push_commit(
|
||||||
self,
|
self,
|
||||||
@@ -128,25 +356,13 @@ class GiteaAPI:
|
|||||||
files: list[dict],
|
files: list[dict],
|
||||||
message: str,
|
message: str,
|
||||||
owner: str | None = None,
|
owner: str | None = None,
|
||||||
repo: str | None = None
|
repo: str | None = None,
|
||||||
) -> dict:
|
) -> dict:
|
||||||
"""
|
"""Push files to a branch.
|
||||||
Push files to a branch.
|
|
||||||
|
|
||||||
In production, this would use gitea's API or git push.
|
In production, this would use gitea's API or git push.
|
||||||
For now, we'll simulate the operation.
|
For now, this remains simulated.
|
||||||
|
|
||||||
Args:
|
|
||||||
branch: Branch name
|
|
||||||
files: List of files to push
|
|
||||||
message: Commit message
|
|
||||||
owner: Organization/owner name (optional, falls back to configured owner)
|
|
||||||
repo: Repository name (optional, falls back to configured repo)
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Status response
|
|
||||||
"""
|
"""
|
||||||
# Use provided owner/repo or fall back to configured values
|
|
||||||
_owner = owner or self.owner
|
_owner = owner or self.owner
|
||||||
_repo = repo or self.repo
|
_repo = repo or self.repo
|
||||||
|
|
||||||
@@ -156,35 +372,15 @@ class GiteaAPI:
|
|||||||
"message": message,
|
"message": message,
|
||||||
"files": files,
|
"files": files,
|
||||||
"owner": _owner,
|
"owner": _owner,
|
||||||
"repo": _repo
|
"repo": _repo,
|
||||||
}
|
}
|
||||||
|
|
||||||
async def get_repo_info(self, owner: str | None = None, repo: str | None = None) -> dict:
|
async def get_repo_info(self, owner: str | None = None, repo: str | None = None) -> dict:
|
||||||
"""Get repository information.
|
"""Get repository information."""
|
||||||
|
|
||||||
Args:
|
|
||||||
owner: Organization/owner name (optional, falls back to configured owner)
|
|
||||||
repo: Repository name (optional, falls back to configured repo)
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Repository info or error message
|
|
||||||
"""
|
|
||||||
# Use provided owner/repo or fall back to configured values
|
|
||||||
_owner = owner or self.owner
|
_owner = owner or self.owner
|
||||||
_repo = repo or self.repo
|
_repo = repo or self.repo
|
||||||
|
|
||||||
if not _repo:
|
if not _repo:
|
||||||
return {"error": "Repository name required for org operations"}
|
return {"error": "Repository name required for org operations"}
|
||||||
|
|
||||||
url = f"{self.base_url}/repos/{_owner}/{_repo}"
|
return await self._request("GET", f"repos/{_owner}/{_repo}")
|
||||||
|
|
||||||
try:
|
|
||||||
import aiohttp
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
async with session.get(url, headers=self.get_auth_headers()) as resp:
|
|
||||||
if resp.status == 200:
|
|
||||||
return await resp.json()
|
|
||||||
else:
|
|
||||||
return {"error": await resp.text()}
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e)}
|
|
||||||
@@ -1,8 +1,14 @@
|
|||||||
"""n8n setup agent for automatic webhook configuration."""
|
"""n8n setup agent for automatic webhook configuration."""
|
||||||
|
|
||||||
import json
|
import json
|
||||||
|
from urllib import error as urllib_error
|
||||||
|
from urllib import request as urllib_request
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from config import settings
|
|
||||||
|
try:
|
||||||
|
from ..config import settings
|
||||||
|
except ImportError:
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
|
||||||
class N8NSetupAgent:
|
class N8NSetupAgent:
|
||||||
@@ -22,94 +28,425 @@ class N8NSetupAgent:
|
|||||||
self.webhook_token = webhook_token
|
self.webhook_token = webhook_token
|
||||||
self.session = None
|
self.session = None
|
||||||
|
|
||||||
|
def _api_path(self, path: str) -> str:
|
||||||
|
"""Build a full n8n API URL for a given endpoint path."""
|
||||||
|
if path.startswith("http://") or path.startswith("https://"):
|
||||||
|
return path
|
||||||
|
trimmed = path.lstrip("/")
|
||||||
|
if trimmed.startswith("api/"):
|
||||||
|
return f"{self.api_url}/{trimmed}"
|
||||||
|
return f"{self.api_url}/api/v1/{trimmed}"
|
||||||
|
|
||||||
def get_auth_headers(self) -> dict:
|
def get_auth_headers(self) -> dict:
|
||||||
"""Get authentication headers for n8n API using webhook token."""
|
"""Get authentication headers for n8n API using webhook token."""
|
||||||
return {
|
headers = {
|
||||||
"n8n-no-credentials": "true",
|
"n8n-no-credentials": "true",
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
"User-Agent": "AI-Software-Factory"
|
"User-Agent": "AI-Software-Factory"
|
||||||
}
|
}
|
||||||
|
if self.webhook_token:
|
||||||
|
headers["X-N8N-API-KEY"] = self.webhook_token
|
||||||
|
return headers
|
||||||
|
|
||||||
|
def _extract_message(self, payload: object) -> str:
|
||||||
|
"""Extract a useful message from an n8n response payload."""
|
||||||
|
if isinstance(payload, dict):
|
||||||
|
for key in ("message", "error", "reason", "hint", "text"):
|
||||||
|
value = payload.get(key)
|
||||||
|
if value:
|
||||||
|
return str(value)
|
||||||
|
if payload:
|
||||||
|
return json.dumps(payload)
|
||||||
|
if payload is None:
|
||||||
|
return "No response body"
|
||||||
|
return str(payload)
|
||||||
|
|
||||||
|
def _normalize_success(self, method: str, url: str, status_code: int, payload: object) -> dict:
|
||||||
|
"""Normalize a successful n8n API response."""
|
||||||
|
if isinstance(payload, dict):
|
||||||
|
response = dict(payload)
|
||||||
|
response.setdefault("status_code", status_code)
|
||||||
|
response.setdefault("url", url)
|
||||||
|
response.setdefault("method", method)
|
||||||
|
return response
|
||||||
|
return {"data": payload, "status_code": status_code, "url": url, "method": method}
|
||||||
|
|
||||||
|
def _normalize_error(self, method: str, url: str, status_code: int | None, payload: object) -> dict:
|
||||||
|
"""Normalize an error response with enough detail for diagnostics."""
|
||||||
|
message = self._extract_message(payload)
|
||||||
|
prefix = f"{method} {url}"
|
||||||
|
if status_code is not None:
|
||||||
|
return {
|
||||||
|
"error": f"{prefix} returned {status_code}: {message}",
|
||||||
|
"message": message,
|
||||||
|
"status_code": status_code,
|
||||||
|
"url": url,
|
||||||
|
"method": method,
|
||||||
|
"payload": payload,
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
"error": f"{prefix} failed: {message}",
|
||||||
|
"message": message,
|
||||||
|
"status_code": None,
|
||||||
|
"url": url,
|
||||||
|
"method": method,
|
||||||
|
"payload": payload,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _health_check_row(self, name: str, result: dict) -> dict:
|
||||||
|
"""Convert a raw request result into a UI/API-friendly health check row."""
|
||||||
|
return {
|
||||||
|
"name": name,
|
||||||
|
"ok": not bool(result.get("error")),
|
||||||
|
"url": result.get("url"),
|
||||||
|
"method": result.get("method", "GET"),
|
||||||
|
"status_code": result.get("status_code"),
|
||||||
|
"message": result.get("message") or ("ok" if not result.get("error") else result.get("error")),
|
||||||
|
}
|
||||||
|
|
||||||
|
def _health_suggestion(self, checks: list[dict]) -> str | None:
|
||||||
|
"""Return a suggestion based on failed n8n health checks."""
|
||||||
|
status_codes = {check.get("status_code") for check in checks if check.get("status_code") is not None}
|
||||||
|
if status_codes and status_codes.issubset({404}):
|
||||||
|
return "Verify N8N_API_URL points to the base n8n URL, for example http://host:5678, not /api/v1 or a webhook URL."
|
||||||
|
if status_codes & {401, 403}:
|
||||||
|
return "Check the configured n8n API key or authentication method."
|
||||||
|
return "Verify the n8n URL, API key, and that the n8n API is reachable from this container."
|
||||||
|
|
||||||
|
def _build_health_result(self, healthz_result: dict, workflows_result: dict) -> dict:
|
||||||
|
"""Build a consolidated health result from the performed checks."""
|
||||||
|
checks = [
|
||||||
|
self._health_check_row("healthz", healthz_result),
|
||||||
|
self._health_check_row("workflows", workflows_result),
|
||||||
|
]
|
||||||
|
|
||||||
|
if not healthz_result.get("error"):
|
||||||
|
return {
|
||||||
|
"status": "ok",
|
||||||
|
"message": "n8n is reachable via /healthz.",
|
||||||
|
"api_url": self.api_url,
|
||||||
|
"auth_configured": bool(self.webhook_token),
|
||||||
|
"checked_via": "healthz",
|
||||||
|
"checks": checks,
|
||||||
|
}
|
||||||
|
|
||||||
|
if not workflows_result.get("error"):
|
||||||
|
workflows = workflows_result.get("data")
|
||||||
|
workflow_count = len(workflows) if isinstance(workflows, list) else None
|
||||||
|
return {
|
||||||
|
"status": "ok",
|
||||||
|
"message": "n8n is reachable via the workflows API, but /healthz is unavailable.",
|
||||||
|
"api_url": self.api_url,
|
||||||
|
"auth_configured": bool(self.webhook_token),
|
||||||
|
"checked_via": "workflows",
|
||||||
|
"workflow_count": workflow_count,
|
||||||
|
"checks": checks,
|
||||||
|
}
|
||||||
|
|
||||||
|
suggestion = self._health_suggestion(checks)
|
||||||
|
return {
|
||||||
|
"status": "error",
|
||||||
|
"error": "n8n health checks failed",
|
||||||
|
"message": "n8n health checks failed.",
|
||||||
|
"api_url": self.api_url,
|
||||||
|
"auth_configured": bool(self.webhook_token),
|
||||||
|
"checked_via": "none",
|
||||||
|
"checks": checks,
|
||||||
|
"suggestion": suggestion,
|
||||||
|
}
|
||||||
|
|
||||||
|
async def _request(self, method: str, path: str, **kwargs) -> dict:
|
||||||
|
"""Send a request to n8n and normalize the response."""
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
headers = kwargs.pop("headers", None) or self.get_auth_headers()
|
||||||
|
url = self._api_path(path)
|
||||||
|
try:
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
async with session.request(method, url, headers=headers, **kwargs) as resp:
|
||||||
|
content_type = resp.headers.get("Content-Type", "")
|
||||||
|
if "application/json" in content_type:
|
||||||
|
payload = await resp.json()
|
||||||
|
else:
|
||||||
|
payload = {"text": await resp.text()}
|
||||||
|
|
||||||
|
if 200 <= resp.status < 300:
|
||||||
|
return self._normalize_success(method, url, resp.status, payload)
|
||||||
|
|
||||||
|
return self._normalize_error(method, url, resp.status, payload)
|
||||||
|
except Exception as e:
|
||||||
|
return self._normalize_error(method, url, None, {"message": str(e)})
|
||||||
|
|
||||||
|
def _request_sync(self, method: str, path: str, **kwargs) -> dict:
|
||||||
|
"""Send a synchronous request to n8n for dashboard health snapshots."""
|
||||||
|
headers = kwargs.pop("headers", None) or self.get_auth_headers()
|
||||||
|
payload = kwargs.pop("json", None)
|
||||||
|
timeout = kwargs.pop("timeout", 5)
|
||||||
|
url = self._api_path(path)
|
||||||
|
data = None
|
||||||
|
if payload is not None:
|
||||||
|
data = json.dumps(payload).encode("utf-8")
|
||||||
|
req = urllib_request.Request(url, data=data, headers=headers, method=method)
|
||||||
|
try:
|
||||||
|
with urllib_request.urlopen(req, timeout=timeout) as resp:
|
||||||
|
raw_body = resp.read().decode("utf-8")
|
||||||
|
content_type = resp.headers.get("Content-Type", "")
|
||||||
|
if "application/json" in content_type and raw_body:
|
||||||
|
parsed = json.loads(raw_body)
|
||||||
|
elif raw_body:
|
||||||
|
parsed = {"text": raw_body}
|
||||||
|
else:
|
||||||
|
parsed = {}
|
||||||
|
return self._normalize_success(method, url, resp.status, parsed)
|
||||||
|
except urllib_error.HTTPError as exc:
|
||||||
|
raw_body = exc.read().decode("utf-8") if exc.fp else ""
|
||||||
|
try:
|
||||||
|
parsed = json.loads(raw_body) if raw_body else {}
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
parsed = {"text": raw_body} if raw_body else {}
|
||||||
|
return self._normalize_error(method, url, exc.code, parsed)
|
||||||
|
except Exception as exc:
|
||||||
|
return self._normalize_error(method, url, None, {"message": str(exc)})
|
||||||
|
|
||||||
async def get_workflow(self, workflow_name: str) -> Optional[dict]:
|
async def get_workflow(self, workflow_name: str) -> Optional[dict]:
|
||||||
"""Get a workflow by name."""
|
"""Get a workflow by name."""
|
||||||
import aiohttp
|
workflows = await self.list_workflows()
|
||||||
try:
|
if isinstance(workflows, dict) and workflows.get("error"):
|
||||||
async with aiohttp.ClientSession() as session:
|
return workflows
|
||||||
# Use the webhook URL directly for workflow operations
|
for workflow in workflows:
|
||||||
# n8n supports calling workflows via /webhook/ path with query params
|
if workflow.get("name") == workflow_name:
|
||||||
# For API token auth, n8n checks the token against webhook credentials
|
return workflow
|
||||||
headers = self.get_auth_headers()
|
return None
|
||||||
|
|
||||||
# Try standard workflow endpoint first (for API token setup)
|
|
||||||
async with session.get(
|
|
||||||
f"{self.api_url}/workflow/{workflow_name}.json",
|
|
||||||
headers=headers
|
|
||||||
) as resp:
|
|
||||||
if resp.status == 200:
|
|
||||||
return await resp.json()
|
|
||||||
elif resp.status == 404:
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
return {"error": f"Status {resp.status}"}
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e)}
|
|
||||||
|
|
||||||
async def create_workflow(self, workflow_json: dict) -> dict:
|
async def create_workflow(self, workflow_json: dict) -> dict:
|
||||||
"""Create or update a workflow."""
|
"""Create or update a workflow."""
|
||||||
import aiohttp
|
return await self._request("POST", "workflows", json=self._workflow_payload(workflow_json))
|
||||||
try:
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
# Use POST to create/update workflow
|
|
||||||
headers = self.get_auth_headers()
|
|
||||||
|
|
||||||
async with session.post(
|
def _workflow_payload(self, workflow_json: dict) -> dict:
|
||||||
f"{self.api_url}/workflow",
|
"""Return a workflow payload without server-managed read-only fields."""
|
||||||
headers=headers,
|
payload = dict(workflow_json)
|
||||||
json=workflow_json
|
payload.pop("active", None)
|
||||||
) as resp:
|
payload.pop("id", None)
|
||||||
if resp.status == 200 or resp.status == 201:
|
payload.pop("createdAt", None)
|
||||||
return await resp.json()
|
payload.pop("updatedAt", None)
|
||||||
else:
|
payload.pop("versionId", None)
|
||||||
return {"error": f"Status {resp.status}: {await resp.text()}"}
|
return payload
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e)}
|
async def _update_workflow_via_put(self, workflow_id: str, workflow_json: dict) -> dict:
|
||||||
|
"""Fallback update path for n8n instances that only support PUT."""
|
||||||
|
return await self._request("PUT", f"workflows/{workflow_id}", json=self._workflow_payload(workflow_json))
|
||||||
|
|
||||||
|
async def update_workflow(self, workflow_id: str, workflow_json: dict) -> dict:
|
||||||
|
"""Update an existing workflow."""
|
||||||
|
result = await self._request("PATCH", f"workflows/{workflow_id}", json=self._workflow_payload(workflow_json))
|
||||||
|
if result.get("status_code") == 405:
|
||||||
|
fallback = await self._update_workflow_via_put(workflow_id, workflow_json)
|
||||||
|
if not fallback.get("error") and isinstance(fallback, dict):
|
||||||
|
fallback.setdefault("method", "PUT")
|
||||||
|
return fallback
|
||||||
|
return result
|
||||||
|
|
||||||
async def enable_workflow(self, workflow_id: str) -> dict:
|
async def enable_workflow(self, workflow_id: str) -> dict:
|
||||||
"""Enable a workflow."""
|
"""Enable a workflow."""
|
||||||
import aiohttp
|
result = await self._request("POST", f"workflows/{workflow_id}/activate")
|
||||||
try:
|
if result.get("error"):
|
||||||
async with aiohttp.ClientSession() as session:
|
fallback = await self._request("PATCH", f"workflows/{workflow_id}", json={"active": True})
|
||||||
headers = self.get_auth_headers()
|
if fallback.get("error"):
|
||||||
|
if fallback.get("status_code") == 405:
|
||||||
async with session.post(
|
put_fallback = await self._request("PUT", f"workflows/{workflow_id}", json={"active": True})
|
||||||
f"{self.api_url}/workflow/{workflow_id}/toggle",
|
if put_fallback.get("error"):
|
||||||
headers=headers,
|
return put_fallback
|
||||||
json={"state": True}
|
return {"success": True, "id": workflow_id, "method": "put"}
|
||||||
) as resp:
|
return fallback
|
||||||
if resp.status in (200, 201):
|
return {"success": True, "id": workflow_id, "method": "patch"}
|
||||||
return {"success": True, "id": workflow_id}
|
return {"success": True, "id": workflow_id, "method": "activate"}
|
||||||
else:
|
|
||||||
return {"error": f"Status {resp.status}: {await resp.text()}"}
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e)}
|
|
||||||
|
|
||||||
async def list_workflows(self) -> list:
|
async def list_workflows(self) -> list:
|
||||||
"""List all workflows."""
|
"""List all workflows."""
|
||||||
import aiohttp
|
result = await self._request("GET", "workflows")
|
||||||
try:
|
if result.get("error"):
|
||||||
async with aiohttp.ClientSession() as session:
|
return result
|
||||||
headers = self.get_auth_headers()
|
if isinstance(result, list):
|
||||||
|
return result
|
||||||
|
if isinstance(result, dict):
|
||||||
|
for key in ("data", "workflows"):
|
||||||
|
value = result.get(key)
|
||||||
|
if isinstance(value, list):
|
||||||
|
return value
|
||||||
|
return []
|
||||||
|
|
||||||
async with session.get(
|
def build_telegram_workflow(self, webhook_path: str, backend_url: str, allowed_chat_id: str | None = None) -> dict:
|
||||||
f"{self.api_url}/workflow",
|
"""Build the Telegram-to-backend workflow definition."""
|
||||||
headers=headers
|
normalized_path = webhook_path.strip().strip("/") or "telegram"
|
||||||
) as resp:
|
allowed_chat = json.dumps(str(allowed_chat_id)) if allowed_chat_id else "''"
|
||||||
if resp.status == 200:
|
return {
|
||||||
return await resp.json()
|
"name": "Telegram to AI Software Factory",
|
||||||
else:
|
"settings": {"executionOrder": "v1"},
|
||||||
return []
|
"nodes": [
|
||||||
except Exception as e:
|
{
|
||||||
|
"id": "webhook-node",
|
||||||
|
"name": "Telegram Webhook",
|
||||||
|
"type": "n8n-nodes-base.webhook",
|
||||||
|
"typeVersion": 2,
|
||||||
|
"position": [-520, 120],
|
||||||
|
"parameters": {
|
||||||
|
"httpMethod": "POST",
|
||||||
|
"path": normalized_path,
|
||||||
|
"responseMode": "responseNode",
|
||||||
|
"options": {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "parse-node",
|
||||||
|
"name": "Prepare Freeform Request",
|
||||||
|
"type": "n8n-nodes-base.code",
|
||||||
|
"typeVersion": 2,
|
||||||
|
"position": [-200, 120],
|
||||||
|
"parameters": {
|
||||||
|
"language": "javaScript",
|
||||||
|
"jsCode": f"const allowedChatId = {allowed_chat};\nconst body = $json.body ?? $json;\nconst message = body.message ?? body;\nconst text = String(message.text ?? '').trim();\nconst chatId = String(message.chat?.id ?? '');\nif (allowedChatId && chatId !== allowedChatId) {{\n return [{{ json: {{ ignored: true, message: `Ignoring message from chat ${{chatId}}`, prompt_text: text, source: 'telegram', chat_id: chatId, chat_type: message.chat?.type ?? null }} }}];\n}}\nreturn [{{ json: {{ prompt_text: text, source: 'telegram', chat_id: chatId, chat_type: message.chat?.type ?? null }} }}];",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "api-node",
|
||||||
|
"name": "AI Software Factory API",
|
||||||
|
"type": "n8n-nodes-base.httpRequest",
|
||||||
|
"typeVersion": 4.2,
|
||||||
|
"position": [120, 120],
|
||||||
|
"parameters": {
|
||||||
|
"method": "POST",
|
||||||
|
"url": backend_url,
|
||||||
|
"sendBody": True,
|
||||||
|
"specifyBody": "json",
|
||||||
|
"jsonBody": "={{ $json }}",
|
||||||
|
"options": {"response": {"response": {"fullResponse": False}}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "response-node",
|
||||||
|
"name": "Respond to Telegram Webhook",
|
||||||
|
"type": "n8n-nodes-base.respondToWebhook",
|
||||||
|
"typeVersion": 1.2,
|
||||||
|
"position": [420, 120],
|
||||||
|
"parameters": {
|
||||||
|
"respondWith": "json",
|
||||||
|
"responseBody": "={{ $json }}",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"connections": {
|
||||||
|
"Telegram Webhook": {"main": [[{"node": "Prepare Freeform Request", "type": "main", "index": 0}]]},
|
||||||
|
"Prepare Freeform Request": {"main": [[{"node": "AI Software Factory API", "type": "main", "index": 0}]]},
|
||||||
|
"AI Software Factory API": {"main": [[{"node": "Respond to Telegram Webhook", "type": "main", "index": 0}]]},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
def build_telegram_trigger_workflow(
|
||||||
|
self,
|
||||||
|
backend_url: str,
|
||||||
|
credential_name: str,
|
||||||
|
allowed_chat_id: str | None = None,
|
||||||
|
) -> dict:
|
||||||
|
"""Build a production Telegram Trigger based workflow."""
|
||||||
|
allowed_chat = json.dumps(str(allowed_chat_id)) if allowed_chat_id else "''"
|
||||||
|
return {
|
||||||
|
"name": "Telegram to AI Software Factory",
|
||||||
|
"settings": {"executionOrder": "v1"},
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": "telegram-trigger-node",
|
||||||
|
"name": "Telegram Trigger",
|
||||||
|
"type": "n8n-nodes-base.telegramTrigger",
|
||||||
|
"typeVersion": 1,
|
||||||
|
"position": [-520, 120],
|
||||||
|
"parameters": {"updates": ["message", "channel_post"]},
|
||||||
|
"credentials": {"telegramApi": {"name": credential_name}},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "filter-node",
|
||||||
|
"name": "Prepare Freeform Request",
|
||||||
|
"type": "n8n-nodes-base.code",
|
||||||
|
"typeVersion": 2,
|
||||||
|
"position": [-180, 120],
|
||||||
|
"parameters": {
|
||||||
|
"language": "javaScript",
|
||||||
|
"jsCode": f"const allowedChatId = {allowed_chat};\nconst message = $json.message ?? $json.channel_post ?? $json;\nconst text = String(message.text ?? '').trim();\nconst chatId = String(message.chat?.id ?? '');\nif (!text) return [];\nif (allowedChatId && chatId !== allowedChatId) return [];\nreturn [{{ json: {{ prompt_text: text, source: 'telegram', chat_id: chatId, chat_type: message.chat?.type ?? null }} }}];",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "api-node",
|
||||||
|
"name": "AI Software Factory API",
|
||||||
|
"type": "n8n-nodes-base.httpRequest",
|
||||||
|
"typeVersion": 4.2,
|
||||||
|
"position": [120, 120],
|
||||||
|
"parameters": {
|
||||||
|
"method": "POST",
|
||||||
|
"url": backend_url,
|
||||||
|
"sendBody": True,
|
||||||
|
"specifyBody": "json",
|
||||||
|
"jsonBody": "={{ $json }}",
|
||||||
|
"options": {"response": {"response": {"fullResponse": False}}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reply-node",
|
||||||
|
"name": "Send Telegram Update",
|
||||||
|
"type": "n8n-nodes-base.telegram",
|
||||||
|
"typeVersion": 1,
|
||||||
|
"position": [420, 120],
|
||||||
|
"parameters": {
|
||||||
|
"resource": "message",
|
||||||
|
"operation": "sendMessage",
|
||||||
|
"chatId": "={{ ($('Telegram Trigger').item.json.message ?? $('Telegram Trigger').item.json.channel_post).chat.id }}",
|
||||||
|
"text": "={{ $json.summary_message || $json.data?.summary_message || $json.message || 'Software generation request accepted' }}",
|
||||||
|
},
|
||||||
|
"credentials": {"telegramApi": {"name": credential_name}},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"connections": {
|
||||||
|
"Telegram Trigger": {"main": [[{"node": "Prepare Freeform Request", "type": "main", "index": 0}]]},
|
||||||
|
"Prepare Freeform Request": {"main": [[{"node": "AI Software Factory API", "type": "main", "index": 0}]]},
|
||||||
|
"AI Software Factory API": {"main": [[{"node": "Send Telegram Update", "type": "main", "index": 0}]]},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
async def list_credentials(self) -> list:
|
||||||
|
"""List n8n credentials."""
|
||||||
|
result = await self._request("GET", "credentials")
|
||||||
|
if result.get("error"):
|
||||||
return []
|
return []
|
||||||
|
if isinstance(result, list):
|
||||||
|
return result
|
||||||
|
if isinstance(result, dict):
|
||||||
|
for key in ("data", "credentials"):
|
||||||
|
value = result.get(key)
|
||||||
|
if isinstance(value, list):
|
||||||
|
return value
|
||||||
|
return []
|
||||||
|
|
||||||
|
async def get_credential(self, credential_name: str, credential_type: str = "telegramApi") -> Optional[dict]:
|
||||||
|
"""Get an existing credential by name and type."""
|
||||||
|
credentials = await self.list_credentials()
|
||||||
|
for credential in credentials:
|
||||||
|
if credential.get("name") == credential_name and credential.get("type") == credential_type:
|
||||||
|
return credential
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def create_credential(self, name: str, credential_type: str, data: dict) -> dict:
|
||||||
|
"""Create an n8n credential."""
|
||||||
|
payload = {"name": name, "type": credential_type, "data": data}
|
||||||
|
return await self._request("POST", "credentials", json=payload)
|
||||||
|
|
||||||
|
async def ensure_telegram_credential(self, bot_token: str, credential_name: str) -> dict:
|
||||||
|
"""Ensure a Telegram credential exists for the workflow trigger."""
|
||||||
|
existing = await self.get_credential(credential_name)
|
||||||
|
if existing:
|
||||||
|
return existing
|
||||||
|
return await self.create_credential(
|
||||||
|
name=credential_name,
|
||||||
|
credential_type="telegramApi",
|
||||||
|
data={"accessToken": bot_token},
|
||||||
|
)
|
||||||
|
|
||||||
async def setup_telegram_workflow(self, webhook_path: str) -> dict:
|
async def setup_telegram_workflow(self, webhook_path: str) -> dict:
|
||||||
"""Setup the Telegram webhook workflow in n8n.
|
"""Setup the Telegram webhook workflow in n8n.
|
||||||
@@ -120,117 +457,95 @@ class N8NSetupAgent:
|
|||||||
Returns:
|
Returns:
|
||||||
Result of setup operation
|
Result of setup operation
|
||||||
"""
|
"""
|
||||||
import os
|
return await self.setup(
|
||||||
webhook_token = os.getenv("TELEGRAM_BOT_TOKEN", "")
|
webhook_path=webhook_path,
|
||||||
|
backend_url=f"{settings.backend_public_url}/generate/text",
|
||||||
# Define the workflow using n8n's Telegram trigger
|
force_update=False,
|
||||||
workflow = {
|
)
|
||||||
"name": "Telegram to AI Software Factory",
|
|
||||||
"nodes": [
|
|
||||||
{
|
|
||||||
"parameters": {
|
|
||||||
"httpMethod": "post",
|
|
||||||
"responseMode": "response",
|
|
||||||
"path": webhook_path or "telegram",
|
|
||||||
"httpBody": "={{ json.stringify($json) }}",
|
|
||||||
"httpAuthType": "headerParam",
|
|
||||||
"headerParams": {
|
|
||||||
"x-n8n-internal": "true",
|
|
||||||
"content-type": "application/json"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"id": "webhook-node",
|
|
||||||
"name": "Telegram Webhook"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"parameters": {
|
|
||||||
"operation": "editFields",
|
|
||||||
"fields": "json",
|
|
||||||
"editFieldsValue": "={{ json.parse($json.text) }}",
|
|
||||||
"options": {}
|
|
||||||
},
|
|
||||||
"id": "parse-node",
|
|
||||||
"name": "Parse Message"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"parameters": {
|
|
||||||
"url": "http://localhost:8000/generate",
|
|
||||||
"method": "post",
|
|
||||||
"sendBody": True,
|
|
||||||
"responseMode": "onReceived",
|
|
||||||
"ignoreSSL": True,
|
|
||||||
"retResponse": True,
|
|
||||||
"sendQueryParams": False
|
|
||||||
},
|
|
||||||
"id": "api-node",
|
|
||||||
"name": "AI Software Factory API"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"parameters": {
|
|
||||||
"operation": "editResponse",
|
|
||||||
"editResponseValue": "={{ $json }}"
|
|
||||||
},
|
|
||||||
"id": "response-node",
|
|
||||||
"name": "Response Builder"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"connections": {
|
|
||||||
"Telegram Webhook": {
|
|
||||||
"webhook": ["parse"]
|
|
||||||
},
|
|
||||||
"Parse Message": {
|
|
||||||
"API Call": ["POST"]
|
|
||||||
},
|
|
||||||
"Response Builder": {
|
|
||||||
"respondToWebhook": ["response"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"settings": {
|
|
||||||
"executionOrder": "v1"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Create the workflow
|
|
||||||
result = await self.create_workflow(workflow)
|
|
||||||
|
|
||||||
if result.get("success") or result.get("id"):
|
|
||||||
# Try to enable the workflow
|
|
||||||
enable_result = await self.enable_workflow(result.get("id", ""))
|
|
||||||
result.update(enable_result)
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
async def health_check(self) -> dict:
|
async def health_check(self) -> dict:
|
||||||
"""Check n8n API health."""
|
"""Check n8n API health."""
|
||||||
import aiohttp
|
result = await self._request("GET", f"{self.api_url}/healthz")
|
||||||
try:
|
fallback = await self._request("GET", "workflows")
|
||||||
async with aiohttp.ClientSession() as session:
|
return self._build_health_result(result, fallback)
|
||||||
headers = self.get_auth_headers()
|
|
||||||
|
|
||||||
async with session.get(
|
def health_check_sync(self) -> dict:
|
||||||
f"{self.api_url}/api/v1/workflow",
|
"""Synchronously check n8n API health for UI rendering."""
|
||||||
headers=headers
|
result = self._request_sync("GET", f"{self.api_url}/healthz")
|
||||||
) as resp:
|
fallback = self._request_sync("GET", "workflows")
|
||||||
if resp.status == 200:
|
return self._build_health_result(result, fallback)
|
||||||
return {"status": "ok"}
|
|
||||||
else:
|
|
||||||
return {"error": f"Status {resp.status}"}
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e)}
|
|
||||||
|
|
||||||
async def setup(self) -> dict:
|
async def setup(
|
||||||
|
self,
|
||||||
|
webhook_path: str = "telegram",
|
||||||
|
backend_url: str | None = None,
|
||||||
|
force_update: bool = False,
|
||||||
|
use_telegram_trigger: bool | None = None,
|
||||||
|
telegram_bot_token: str | None = None,
|
||||||
|
telegram_credential_name: str | None = None,
|
||||||
|
) -> dict:
|
||||||
"""Setup n8n webhooks automatically."""
|
"""Setup n8n webhooks automatically."""
|
||||||
# First, verify n8n is accessible
|
# First, verify n8n is accessible
|
||||||
health = await self.health_check()
|
health = await self.health_check()
|
||||||
if health.get("error"):
|
if health.get("error"):
|
||||||
return {"status": "error", "message": health.get("error")}
|
return {
|
||||||
|
"status": "error",
|
||||||
|
"message": health.get("message") or health.get("error"),
|
||||||
|
"health": health,
|
||||||
|
"checks": health.get("checks", []),
|
||||||
|
"suggestion": health.get("suggestion"),
|
||||||
|
}
|
||||||
|
|
||||||
# Try to get existing telegram workflow
|
effective_backend_url = backend_url or f"{settings.backend_public_url}/generate/text"
|
||||||
existing = await self.get_workflow("Telegram to AI Software Factory")
|
effective_bot_token = telegram_bot_token or settings.telegram_bot_token
|
||||||
if existing and not existing.get("error"):
|
effective_credential_name = telegram_credential_name or settings.n8n_telegram_credential_name
|
||||||
# Enable existing workflow
|
trigger_mode = use_telegram_trigger if use_telegram_trigger is not None else bool(effective_bot_token)
|
||||||
return await self.enable_workflow(existing.get("id", ""))
|
|
||||||
|
|
||||||
# Create new workflow
|
if trigger_mode:
|
||||||
result = await self.setup_telegram_workflow("/webhook/telegram")
|
credential = await self.ensure_telegram_credential(effective_bot_token, effective_credential_name)
|
||||||
return result
|
if credential.get("error"):
|
||||||
|
return {"status": "error", "message": credential["error"], "details": credential}
|
||||||
|
workflow = self.build_telegram_trigger_workflow(
|
||||||
|
backend_url=effective_backend_url,
|
||||||
|
credential_name=effective_credential_name,
|
||||||
|
allowed_chat_id=settings.telegram_chat_id,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
workflow = self.build_telegram_workflow(
|
||||||
|
webhook_path=webhook_path,
|
||||||
|
backend_url=effective_backend_url,
|
||||||
|
allowed_chat_id=settings.telegram_chat_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
existing = await self.get_workflow(workflow["name"])
|
||||||
|
if isinstance(existing, dict) and existing.get("error"):
|
||||||
|
return {"status": "error", "message": existing["error"], "details": existing}
|
||||||
|
|
||||||
|
workflow_id = None
|
||||||
|
if existing and existing.get("id"):
|
||||||
|
workflow_id = str(existing["id"])
|
||||||
|
if force_update:
|
||||||
|
result = await self.update_workflow(workflow_id, workflow)
|
||||||
|
else:
|
||||||
|
result = existing
|
||||||
|
else:
|
||||||
|
result = await self.create_workflow(workflow)
|
||||||
|
workflow_id = str(result.get("id", "")) if isinstance(result, dict) else None
|
||||||
|
|
||||||
|
if isinstance(result, dict) and result.get("error"):
|
||||||
|
return {"status": "error", "message": result["error"], "details": result}
|
||||||
|
|
||||||
|
workflow_id = workflow_id or str(result.get("id", ""))
|
||||||
|
enable_result = await self.enable_workflow(workflow_id)
|
||||||
|
if enable_result.get("error"):
|
||||||
|
return {"status": "error", "message": enable_result["error"], "workflow": result, "details": enable_result}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "success",
|
||||||
|
"message": f'Workflow "{workflow["name"]}" is active',
|
||||||
|
"workflow_id": workflow_id,
|
||||||
|
"workflow_name": workflow["name"],
|
||||||
|
"webhook_path": webhook_path.strip().strip("/") or "telegram",
|
||||||
|
"backend_url": effective_backend_url,
|
||||||
|
"trigger_mode": "telegram" if trigger_mode else "webhook",
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,14 +1,26 @@
|
|||||||
"""Agent orchestrator for software generation."""
|
"""Agent orchestrator for software generation."""
|
||||||
|
|
||||||
import asyncio
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import difflib
|
||||||
|
import py_compile
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from agents.git_manager import GitManager
|
|
||||||
from agents.ui_manager import UIManager
|
|
||||||
from agents.gitea import GiteaAPI
|
|
||||||
from agents.database_manager import DatabaseManager
|
|
||||||
from config import settings
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import os
|
|
||||||
|
try:
|
||||||
|
from ..config import settings
|
||||||
|
from .database_manager import DatabaseManager
|
||||||
|
from .git_manager import GitManager
|
||||||
|
from .gitea import GiteaAPI
|
||||||
|
from .ui_manager import UIManager
|
||||||
|
except ImportError:
|
||||||
|
from config import settings
|
||||||
|
from agents.database_manager import DatabaseManager
|
||||||
|
from agents.git_manager import GitManager
|
||||||
|
from agents.gitea import GiteaAPI
|
||||||
|
from agents.ui_manager import UIManager
|
||||||
|
|
||||||
|
|
||||||
class AgentOrchestrator:
|
class AgentOrchestrator:
|
||||||
@@ -21,7 +33,13 @@ class AgentOrchestrator:
|
|||||||
description: str,
|
description: str,
|
||||||
features: list,
|
features: list,
|
||||||
tech_stack: list,
|
tech_stack: list,
|
||||||
db = None
|
db=None,
|
||||||
|
prompt_text: str | None = None,
|
||||||
|
prompt_actor: str = "api",
|
||||||
|
existing_history=None,
|
||||||
|
prompt_source_context: dict | None = None,
|
||||||
|
prompt_routing: dict | None = None,
|
||||||
|
related_issue_hint: dict | None = None,
|
||||||
):
|
):
|
||||||
"""Initialize orchestrator."""
|
"""Initialize orchestrator."""
|
||||||
self.project_id = project_id
|
self.project_id = project_id
|
||||||
@@ -36,76 +54,487 @@ class AgentOrchestrator:
|
|||||||
self.logs = []
|
self.logs = []
|
||||||
self.ui_data = {}
|
self.ui_data = {}
|
||||||
self.db = db
|
self.db = db
|
||||||
|
self.prompt_text = prompt_text
|
||||||
# Initialize agents
|
self.prompt_actor = prompt_actor
|
||||||
self.git_manager = GitManager(project_id)
|
self.prompt_source_context = prompt_source_context or {}
|
||||||
self.ui_manager = UIManager(project_id)
|
self.prompt_routing = prompt_routing or {}
|
||||||
|
self.existing_history = existing_history
|
||||||
|
self.changed_files: list[str] = []
|
||||||
self.gitea_api = GiteaAPI(
|
self.gitea_api = GiteaAPI(
|
||||||
token=settings.GITEA_TOKEN,
|
token=settings.GITEA_TOKEN,
|
||||||
base_url=settings.GITEA_URL,
|
base_url=settings.GITEA_URL,
|
||||||
owner=settings.GITEA_OWNER,
|
owner=settings.GITEA_OWNER,
|
||||||
repo=settings.GITEA_REPO or ""
|
repo=settings.GITEA_REPO or ""
|
||||||
)
|
)
|
||||||
|
self.project_root = settings.projects_root / project_id
|
||||||
|
self.prompt_audit = None
|
||||||
|
self.repo_name = settings.gitea_repo or self.gitea_api.build_project_repo_name(project_id, project_name)
|
||||||
|
self.repo_owner = settings.gitea_owner
|
||||||
|
self.repo_url = None
|
||||||
|
self.branch_name = self._build_pr_branch_name(project_id)
|
||||||
|
self.active_pull_request = None
|
||||||
|
self._gitea_username: str | None = None
|
||||||
|
hinted_issue_number = (related_issue_hint or {}).get('number') if related_issue_hint else None
|
||||||
|
self.related_issue_number = hinted_issue_number if hinted_issue_number is not None else self._extract_issue_number(prompt_text)
|
||||||
|
self.related_issue: dict | None = DatabaseManager._normalize_issue(related_issue_hint)
|
||||||
|
|
||||||
|
# Initialize agents
|
||||||
|
self.git_manager = GitManager(project_id, project_dir=str(self.project_root))
|
||||||
|
self.ui_manager = UIManager(project_id)
|
||||||
|
|
||||||
# Initialize database manager if db session provided
|
# Initialize database manager if db session provided
|
||||||
self.db_manager = None
|
self.db_manager = None
|
||||||
self.history = None
|
self.history = None
|
||||||
if db:
|
if db:
|
||||||
self.db_manager = DatabaseManager(db)
|
self.db_manager = DatabaseManager(db)
|
||||||
# Log project start to database
|
if existing_history is not None:
|
||||||
self.history = self.db_manager.log_project_start(
|
self.history = existing_history
|
||||||
project_id=project_id,
|
self.project_id = existing_history.project_id
|
||||||
project_name=project_name,
|
self.project_name = existing_history.project_name or project_name
|
||||||
description=description
|
self.description = existing_history.description or description
|
||||||
|
else:
|
||||||
|
self.history = self.db_manager.log_project_start(
|
||||||
|
project_id=project_id,
|
||||||
|
project_name=project_name,
|
||||||
|
description=description
|
||||||
|
)
|
||||||
|
self.db_manager = DatabaseManager(db)
|
||||||
|
self.active_pull_request = self.db_manager.get_open_pull_request(project_id=self.project_id)
|
||||||
|
if existing_history is not None and self.history is not None:
|
||||||
|
latest_ui = self.db_manager._get_latest_ui_snapshot_data(self.history.id)
|
||||||
|
repository = latest_ui.get('repository') if isinstance(latest_ui, dict) else None
|
||||||
|
if isinstance(repository, dict) and repository:
|
||||||
|
self.repo_owner = repository.get('owner') or self.repo_owner
|
||||||
|
self.repo_name = repository.get('name') or self.repo_name
|
||||||
|
self.repo_url = repository.get('url') or self.repo_url
|
||||||
|
if self.prompt_text:
|
||||||
|
self.prompt_audit = self.db_manager.log_prompt_submission(
|
||||||
|
history_id=self.history.id,
|
||||||
|
project_id=self.project_id,
|
||||||
|
prompt_text=self.prompt_text,
|
||||||
|
features=self.features,
|
||||||
|
tech_stack=self.tech_stack,
|
||||||
|
actor_name=self.prompt_actor,
|
||||||
|
related_issue={'number': self.related_issue_number} if self.related_issue_number is not None else None,
|
||||||
|
source_context=self.prompt_source_context,
|
||||||
|
routing=self.prompt_routing,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.ui_manager.ui_data["project_root"] = str(self.project_root)
|
||||||
|
self.ui_manager.ui_data["features"] = list(self.features)
|
||||||
|
self.ui_manager.ui_data["tech_stack"] = list(self.tech_stack)
|
||||||
|
self.ui_manager.ui_data["repository"] = {
|
||||||
|
"owner": self.repo_owner,
|
||||||
|
"name": self.repo_name,
|
||||||
|
"mode": "project" if settings.use_project_repositories else "shared",
|
||||||
|
"status": "pending" if settings.use_project_repositories else "shared",
|
||||||
|
"provider": "gitea",
|
||||||
|
}
|
||||||
|
if self.related_issue:
|
||||||
|
self.ui_manager.ui_data["related_issue"] = self.related_issue
|
||||||
|
if self.active_pull_request:
|
||||||
|
self.ui_manager.ui_data["pull_request"] = self.active_pull_request
|
||||||
|
|
||||||
|
def _build_pr_branch_name(self, project_id: str) -> str:
|
||||||
|
"""Build a stable branch name used until the PR is merged."""
|
||||||
|
return f"ai/{project_id}"
|
||||||
|
|
||||||
|
def _extract_issue_number(self, prompt_text: str | None) -> int | None:
|
||||||
|
"""Extract an issue reference from prompt text."""
|
||||||
|
if not prompt_text:
|
||||||
|
return None
|
||||||
|
match = re.search(r'(?:#|issue\s+)(\d+)', prompt_text, flags=re.IGNORECASE)
|
||||||
|
return int(match.group(1)) if match else None
|
||||||
|
|
||||||
|
def _build_repo_url(self, owner: str | None, repo: str | None) -> str | None:
|
||||||
|
if not owner or not repo or not settings.gitea_url:
|
||||||
|
return None
|
||||||
|
return f"{settings.gitea_url.rstrip('/')}/{owner}/{repo}"
|
||||||
|
|
||||||
|
def _log_generation_plan_trace(self) -> None:
|
||||||
|
"""Persist the current generation plan as an inspectable trace."""
|
||||||
|
if not self.db_manager or not self.history or not self.prompt_audit:
|
||||||
|
return
|
||||||
|
planned_files = list(self._template_files().keys())
|
||||||
|
self.db_manager.log_llm_trace(
|
||||||
|
project_id=self.project_id,
|
||||||
|
history_id=self.history.id,
|
||||||
|
prompt_id=self.prompt_audit.id,
|
||||||
|
stage='generation_plan',
|
||||||
|
provider='factory-planner',
|
||||||
|
model='template-generator',
|
||||||
|
system_prompt='Plan the generated project structure from the structured request and repository state.',
|
||||||
|
user_prompt=self.prompt_text or self.description,
|
||||||
|
assistant_response=(
|
||||||
|
f"Planned files: {', '.join(planned_files)}. "
|
||||||
|
f"Target branch: {self.branch_name}. "
|
||||||
|
f"Repository mode: {self.ui_manager.ui_data.get('repository', {}).get('mode', 'unknown')}."
|
||||||
|
+ (
|
||||||
|
f" Linked issue: #{self.related_issue.get('number')} {self.related_issue.get('title')}."
|
||||||
|
if self.related_issue else ''
|
||||||
|
)
|
||||||
|
),
|
||||||
|
raw_response={
|
||||||
|
'planned_files': planned_files,
|
||||||
|
'features': list(self.features),
|
||||||
|
'tech_stack': list(self.tech_stack),
|
||||||
|
'branch': self.branch_name,
|
||||||
|
'repository': self.ui_manager.ui_data.get('repository', {}),
|
||||||
|
'related_issue': self.related_issue,
|
||||||
|
},
|
||||||
|
fallback_used=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _sync_issue_context(self) -> None:
|
||||||
|
"""Sync repository issues and resolve a linked issue from the prompt when present."""
|
||||||
|
if not self.db_manager or not self.history:
|
||||||
|
return
|
||||||
|
repository = self.ui_manager.ui_data.get('repository') or {}
|
||||||
|
owner = repository.get('owner') or self.repo_owner
|
||||||
|
repo_name = repository.get('name') or self.repo_name
|
||||||
|
if not owner or not repo_name or not settings.gitea_url or not settings.gitea_token:
|
||||||
|
return
|
||||||
|
issues_result = self.db_manager.sync_repository_issues(project_id=self.project_id, gitea_api=self.gitea_api, state='open')
|
||||||
|
self.ui_manager.ui_data['issues'] = issues_result.get('issues', []) if issues_result.get('status') == 'success' else []
|
||||||
|
if self.related_issue_number is None:
|
||||||
|
return
|
||||||
|
issue_payload = await self.gitea_api.get_issue(issue_number=self.related_issue_number, owner=owner, repo=repo_name)
|
||||||
|
if isinstance(issue_payload, dict) and issue_payload.get('error'):
|
||||||
|
return
|
||||||
|
if issue_payload.get('pull_request'):
|
||||||
|
return
|
||||||
|
self.related_issue = DatabaseManager._normalize_issue(issue_payload)
|
||||||
|
self.ui_manager.ui_data['related_issue'] = self.related_issue
|
||||||
|
if self.prompt_audit:
|
||||||
|
self.db_manager.attach_issue_to_prompt(self.prompt_audit.id, self.related_issue)
|
||||||
|
|
||||||
|
async def _ensure_remote_repository(self) -> None:
|
||||||
|
if not settings.use_project_repositories:
|
||||||
|
self.ui_manager.ui_data["repository"]["status"] = "shared"
|
||||||
|
if settings.gitea_repo:
|
||||||
|
predicted_url = self._build_repo_url(self.repo_owner, self.repo_name)
|
||||||
|
if predicted_url:
|
||||||
|
self.repo_url = predicted_url
|
||||||
|
self.ui_manager.ui_data["repository"]["url"] = predicted_url
|
||||||
|
self.ui_manager.ui_data["repository"]["api_response"] = {
|
||||||
|
"status": "shared",
|
||||||
|
"detail": "Using the configured shared repository instead of provisioning a per-project repo.",
|
||||||
|
}
|
||||||
|
return
|
||||||
|
if not self.repo_owner or not settings.gitea_token or not settings.gitea_url:
|
||||||
|
self.ui_manager.ui_data["repository"]["status"] = "skipped"
|
||||||
|
self.ui_manager.ui_data["repository"]["reason"] = "Missing Gitea owner, URL, or token configuration"
|
||||||
|
self.ui_manager.ui_data["repository"]["api_response"] = {
|
||||||
|
"status": "skipped",
|
||||||
|
"detail": "Missing Gitea owner, URL, or token configuration",
|
||||||
|
}
|
||||||
|
return
|
||||||
|
|
||||||
|
repo_name = self.repo_name
|
||||||
|
result = await self.gitea_api.create_repo(
|
||||||
|
repo_name=repo_name,
|
||||||
|
owner=self.repo_owner,
|
||||||
|
description=f"AI-generated project for {self.project_name}",
|
||||||
|
auto_init=False,
|
||||||
|
)
|
||||||
|
if result.get("status") == "exists" and repo_name == self.gitea_api.build_project_repo_name(self.project_id, self.project_name):
|
||||||
|
repo_name = f"{repo_name}-{self.project_id.split('-')[-1]}"
|
||||||
|
result = await self.gitea_api.create_repo(
|
||||||
|
repo_name=repo_name,
|
||||||
|
owner=self.repo_owner,
|
||||||
|
description=f"AI-generated project for {self.project_name}",
|
||||||
|
auto_init=False,
|
||||||
)
|
)
|
||||||
# Re-fetch with new history_id
|
self.repo_name = repo_name
|
||||||
self.db_manager = DatabaseManager(db)
|
self.ui_manager.ui_data["repository"]["name"] = repo_name
|
||||||
|
if self.db_manager:
|
||||||
|
self.db_manager.log_system_event(
|
||||||
|
component="gitea",
|
||||||
|
level="ERROR" if result.get("error") else "INFO",
|
||||||
|
message=(
|
||||||
|
f"Repository setup failed for {self.repo_owner}/{self.repo_name}: {result.get('error')}"
|
||||||
|
if result.get("error")
|
||||||
|
else f"Prepared repository {self.repo_owner}/{self.repo_name}"
|
||||||
|
),
|
||||||
|
)
|
||||||
|
repo_status = result.get("status", "error" if result.get("error") else "ready")
|
||||||
|
self.ui_manager.ui_data["repository"]["status"] = repo_status
|
||||||
|
self.ui_manager.ui_data["repository"]["api_response"] = {
|
||||||
|
key: value
|
||||||
|
for key, value in result.items()
|
||||||
|
if key not in {"private"}
|
||||||
|
}
|
||||||
|
if result.get("status_code") is not None:
|
||||||
|
self.ui_manager.ui_data["repository"]["api_status_code"] = result.get("status_code")
|
||||||
|
if result.get("error"):
|
||||||
|
self.ui_manager.ui_data["repository"]["reason"] = result.get("error")
|
||||||
|
self.ui_manager.ui_data["repository"].pop("url", None)
|
||||||
|
elif result.get("html_url"):
|
||||||
|
self.repo_url = result["html_url"]
|
||||||
|
self.ui_manager.ui_data["repository"]["url"] = self.repo_url
|
||||||
|
clone_url = result.get("clone_url") or self.gitea_api.build_repo_git_url(self.repo_owner, self.repo_name)
|
||||||
|
if clone_url:
|
||||||
|
self.ui_manager.ui_data["repository"]["clone_url"] = clone_url
|
||||||
|
self.ui_manager.ui_data["repository"].pop("reason", None)
|
||||||
|
elif repo_status == "exists":
|
||||||
|
predicted_url = self._build_repo_url(self.repo_owner, self.repo_name)
|
||||||
|
if predicted_url:
|
||||||
|
self.repo_url = predicted_url
|
||||||
|
self.ui_manager.ui_data["repository"]["url"] = predicted_url
|
||||||
|
clone_url = result.get("clone_url") or self.gitea_api.build_repo_git_url(self.repo_owner, self.repo_name)
|
||||||
|
if clone_url:
|
||||||
|
self.ui_manager.ui_data["repository"]["clone_url"] = clone_url
|
||||||
|
else:
|
||||||
|
self.ui_manager.ui_data["repository"].pop("url", None)
|
||||||
|
|
||||||
|
async def _resolve_gitea_username(self) -> str:
|
||||||
|
"""Resolve and cache the Gitea login used for authenticated git operations."""
|
||||||
|
if self._gitea_username:
|
||||||
|
return self._gitea_username
|
||||||
|
user_info = await self.gitea_api.get_current_user()
|
||||||
|
if user_info.get('error') or not user_info.get('login'):
|
||||||
|
raise RuntimeError(f"Unable to resolve Gitea user for push: {user_info.get('error', 'missing login')}")
|
||||||
|
self._gitea_username = user_info['login']
|
||||||
|
return self._gitea_username
|
||||||
|
|
||||||
|
async def _push_branch(self, branch: str) -> dict | None:
|
||||||
|
"""Push a branch to the configured project repository when available."""
|
||||||
|
repository = self.ui_manager.ui_data.get('repository') or {}
|
||||||
|
if repository.get('mode') != 'project':
|
||||||
|
return None
|
||||||
|
if repository.get('status') not in {'created', 'exists', 'ready'}:
|
||||||
|
return None
|
||||||
|
if not settings.gitea_token or not self.repo_owner or not self.repo_name:
|
||||||
|
return None
|
||||||
|
|
||||||
|
clone_url = repository.get('clone_url') or self.gitea_api.build_repo_git_url(self.repo_owner, self.repo_name)
|
||||||
|
if not clone_url:
|
||||||
|
return None
|
||||||
|
username = await self._resolve_gitea_username()
|
||||||
|
self.git_manager.push_with_credentials(
|
||||||
|
remote_url=clone_url,
|
||||||
|
username=username,
|
||||||
|
password=settings.gitea_token,
|
||||||
|
remote='origin',
|
||||||
|
branch=branch,
|
||||||
|
)
|
||||||
|
return {'status': 'pushed', 'remote': clone_url, 'branch': branch}
|
||||||
|
|
||||||
|
async def _prepare_git_workspace(self) -> None:
|
||||||
|
"""Initialize the local repo and ensure the PR branch exists before writing files."""
|
||||||
|
if not self.git_manager.is_git_available():
|
||||||
|
self.ui_manager.ui_data.setdefault('git', {})['error'] = 'git executable is not available in PATH'
|
||||||
|
self._append_log('Local git workspace skipped: git executable is not available in PATH')
|
||||||
|
return
|
||||||
|
if not self.git_manager.has_repo():
|
||||||
|
self.git_manager.init_repo()
|
||||||
|
|
||||||
|
if not self.git_manager.current_head_or_none():
|
||||||
|
self.git_manager.create_empty_commit('Initialize project repository')
|
||||||
|
try:
|
||||||
|
await self._push_branch('main')
|
||||||
|
except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as exc:
|
||||||
|
self.ui_manager.ui_data.setdefault('git', {})['remote_error'] = str(exc)
|
||||||
|
self._append_log(f'Initial main push skipped: {exc}')
|
||||||
|
|
||||||
|
if self.git_manager.branch_exists(self.branch_name):
|
||||||
|
self.git_manager.checkout_branch(self.branch_name)
|
||||||
|
else:
|
||||||
|
self.git_manager.checkout_branch(self.branch_name, create=True, start_point='main')
|
||||||
|
self.ui_manager.ui_data.setdefault('git', {})['active_branch'] = self.branch_name
|
||||||
|
|
||||||
|
async def _ensure_pull_request(self) -> dict | None:
|
||||||
|
"""Create the project pull request on first delivery and reuse it later."""
|
||||||
|
if self.active_pull_request:
|
||||||
|
self.ui_manager.ui_data['pull_request'] = self.active_pull_request
|
||||||
|
return self.active_pull_request
|
||||||
|
repository = self.ui_manager.ui_data.get('repository') or {}
|
||||||
|
if repository.get('mode') != 'project' or repository.get('status') not in {'created', 'exists', 'ready'}:
|
||||||
|
return None
|
||||||
|
|
||||||
|
title = f"AI delivery for {self.project_name}"
|
||||||
|
body = (
|
||||||
|
f"Automated software factory changes for {self.project_name}.\n\n"
|
||||||
|
f"Prompt: {self.prompt_text or self.description}\n\n"
|
||||||
|
f"Branch: {self.branch_name}"
|
||||||
|
)
|
||||||
|
result = await self.gitea_api.create_pull_request(
|
||||||
|
title=title,
|
||||||
|
body=body,
|
||||||
|
owner=self.repo_owner,
|
||||||
|
repo=self.repo_name,
|
||||||
|
base='main',
|
||||||
|
head=self.branch_name,
|
||||||
|
)
|
||||||
|
if result.get('error'):
|
||||||
|
raise RuntimeError(f"Unable to create pull request: {result.get('error')}")
|
||||||
|
|
||||||
|
pr_number = result.get('number') or result.get('id') or 0
|
||||||
|
pr_data = {
|
||||||
|
'pr_number': pr_number,
|
||||||
|
'title': result.get('title', title),
|
||||||
|
'body': result.get('body', body),
|
||||||
|
'state': result.get('state', 'open'),
|
||||||
|
'base': result.get('base', {}).get('ref', 'main') if isinstance(result.get('base'), dict) else 'main',
|
||||||
|
'user': result.get('user', {}).get('login', 'system') if isinstance(result.get('user'), dict) else 'system',
|
||||||
|
'pr_url': result.get('html_url') or self.gitea_api.build_pull_request_url(pr_number, self.repo_owner, self.repo_name),
|
||||||
|
'merged': bool(result.get('merged')),
|
||||||
|
'pr_state': result.get('state', 'open'),
|
||||||
|
}
|
||||||
|
if self.db_manager and self.history:
|
||||||
|
self.db_manager.save_pr_data(self.history.id, pr_data)
|
||||||
|
self.active_pull_request = self.db_manager.get_open_pull_request(project_id=self.project_id) if self.db_manager else pr_data
|
||||||
|
self.ui_manager.ui_data['pull_request'] = self.active_pull_request or pr_data
|
||||||
|
return self.active_pull_request or pr_data
|
||||||
|
|
||||||
|
async def _push_remote_commit(self, commit_hash: str, commit_message: str, changed_files: list[str], base_commit: str | None) -> dict | None:
|
||||||
|
"""Push the local commit to the provisioned Gitea repository and build browser links."""
|
||||||
|
repository = self.ui_manager.ui_data.get("repository") or {}
|
||||||
|
if repository.get("mode") != "project":
|
||||||
|
return None
|
||||||
|
if repository.get("status") not in {"created", "exists", "ready"}:
|
||||||
|
return None
|
||||||
|
push_result = await self._push_branch(self.branch_name)
|
||||||
|
if push_result is None:
|
||||||
|
return None
|
||||||
|
pull_request = await self._ensure_pull_request()
|
||||||
|
commit_url = self.gitea_api.build_commit_url(commit_hash, owner=self.repo_owner, repo=self.repo_name)
|
||||||
|
compare_url = self.gitea_api.build_compare_url(base_commit, commit_hash, owner=self.repo_owner, repo=self.repo_name) if base_commit else None
|
||||||
|
remote_record = {
|
||||||
|
"status": "pushed",
|
||||||
|
"remote": push_result.get('remote'),
|
||||||
|
"branch": self.branch_name,
|
||||||
|
"commit_url": commit_url,
|
||||||
|
"compare_url": compare_url,
|
||||||
|
"changed_files": changed_files,
|
||||||
|
"pull_request": pull_request,
|
||||||
|
}
|
||||||
|
self.ui_manager.ui_data.setdefault("git", {})["remote_push"] = remote_record
|
||||||
|
repository["last_commit_url"] = commit_url
|
||||||
|
if compare_url:
|
||||||
|
repository["last_compare_url"] = compare_url
|
||||||
|
self._append_log(f"Pushed generated commit to {self.repo_owner}/{self.repo_name}.")
|
||||||
|
return remote_record
|
||||||
|
|
||||||
|
def _build_diff_text(self, relative_path: str, previous_content: str, new_content: str) -> str:
|
||||||
|
"""Build a unified diff for display in the dashboard."""
|
||||||
|
previous_lines = previous_content.splitlines(keepends=True)
|
||||||
|
new_lines = new_content.splitlines(keepends=True)
|
||||||
|
diff = difflib.unified_diff(
|
||||||
|
previous_lines,
|
||||||
|
new_lines,
|
||||||
|
fromfile=f"a/{relative_path}",
|
||||||
|
tofile=f"b/{relative_path}",
|
||||||
|
)
|
||||||
|
return "".join(diff)
|
||||||
|
|
||||||
|
def _append_log(self, message: str) -> None:
|
||||||
|
timestamped = f"[{datetime.utcnow().isoformat()}] {message}"
|
||||||
|
self.logs.append(timestamped)
|
||||||
|
if self.db_manager and self.history:
|
||||||
|
self.db_manager._log_action(self.history.id, "INFO", message)
|
||||||
|
|
||||||
|
def _update_progress(self, progress: int, step: str, message: str) -> None:
|
||||||
|
self.progress = progress
|
||||||
|
self.current_step = step
|
||||||
|
self.message = message
|
||||||
|
self.ui_manager.update_status(self.status, progress, message)
|
||||||
|
if self.db_manager and self.history:
|
||||||
|
self.db_manager.log_progress_update(
|
||||||
|
history_id=self.history.id,
|
||||||
|
progress=progress,
|
||||||
|
step=step,
|
||||||
|
message=message,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _write_file(self, relative_path: str, content: str) -> None:
|
||||||
|
target = self.project_root / relative_path
|
||||||
|
target.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
change_type = "UPDATE" if target.exists() else "CREATE"
|
||||||
|
previous_content = target.read_text(encoding="utf-8") if target.exists() else ""
|
||||||
|
diff_text = self._build_diff_text(relative_path, previous_content, content)
|
||||||
|
target.write_text(content, encoding="utf-8")
|
||||||
|
self.changed_files.append(relative_path)
|
||||||
|
if self.db_manager and self.history:
|
||||||
|
self.db_manager.log_code_change(
|
||||||
|
project_id=self.project_id,
|
||||||
|
change_type=change_type,
|
||||||
|
file_path=relative_path,
|
||||||
|
actor="orchestrator",
|
||||||
|
actor_type="agent",
|
||||||
|
details=f"{change_type.title()}d generated artifact {relative_path}",
|
||||||
|
history_id=self.history.id,
|
||||||
|
prompt_id=self.prompt_audit.id if self.prompt_audit else None,
|
||||||
|
diff_summary=f"Wrote {len(content.splitlines())} lines to {relative_path}",
|
||||||
|
diff_text=diff_text,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _template_files(self) -> dict[str, str]:
|
||||||
|
feature_section = "\n".join(f"- {feature}" for feature in self.features) or "- None specified"
|
||||||
|
tech_section = "\n".join(f"- {tech}" for tech in self.tech_stack) or "- Python"
|
||||||
|
return {
|
||||||
|
".gitignore": "__pycache__/\n*.pyc\n.venv/\n.pytest_cache/\n.mypy_cache/\n",
|
||||||
|
"README.md": (
|
||||||
|
f"# {self.project_name}\n\n"
|
||||||
|
f"{self.description}\n\n"
|
||||||
|
"## Features\n"
|
||||||
|
f"{feature_section}\n\n"
|
||||||
|
"## Tech Stack\n"
|
||||||
|
f"{tech_section}\n"
|
||||||
|
),
|
||||||
|
"requirements.txt": "fastapi\nuvicorn\npytest\n",
|
||||||
|
"main.py": (
|
||||||
|
"from fastapi import FastAPI\n\n"
|
||||||
|
"app = FastAPI(title=\"Generated App\")\n\n"
|
||||||
|
"@app.get('/')\n"
|
||||||
|
"def read_root():\n"
|
||||||
|
f" return {{'name': '{self.project_name}', 'status': 'generated', 'features': {self.features!r}}}\n"
|
||||||
|
),
|
||||||
|
"tests/test_app.py": (
|
||||||
|
"from main import read_root\n\n"
|
||||||
|
"def test_read_root():\n"
|
||||||
|
f" assert read_root()['name'] == '{self.project_name}'\n"
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
async def run(self) -> dict:
|
async def run(self) -> dict:
|
||||||
"""Run the software generation process with full audit logging."""
|
"""Run the software generation process with full audit logging."""
|
||||||
try:
|
try:
|
||||||
# Step 1: Initialize project
|
# Step 1: Initialize project
|
||||||
self.progress = 5
|
self.status = "running"
|
||||||
self.current_step = "Initializing project"
|
self._update_progress(5, "initializing", "Setting up project structure...")
|
||||||
self.message = "Setting up project structure..."
|
self._append_log("Initializing project.")
|
||||||
self.logs.append(f"[{datetime.utcnow().isoformat()}] Initializing project.")
|
|
||||||
|
await self._ensure_remote_repository()
|
||||||
|
await self._sync_issue_context()
|
||||||
|
await self._prepare_git_workspace()
|
||||||
|
self._log_generation_plan_trace()
|
||||||
|
|
||||||
# Step 2: Create project structure (skip git operations)
|
# Step 2: Create project structure (skip git operations)
|
||||||
self.progress = 15
|
self._update_progress(20, "project-structure", "Creating project files...")
|
||||||
self.current_step = "Creating project structure"
|
|
||||||
self.message = "Creating project files..."
|
|
||||||
await self._create_project_structure()
|
await self._create_project_structure()
|
||||||
|
|
||||||
# Step 3: Generate initial code
|
# Step 3: Generate initial code
|
||||||
self.progress = 25
|
self._update_progress(55, "code-generation", "Generating project entrypoint and tests...")
|
||||||
self.current_step = "Generating initial code"
|
|
||||||
self.message = "Generating initial code with Ollama..."
|
|
||||||
await self._generate_code()
|
await self._generate_code()
|
||||||
|
|
||||||
# Step 4: Test the code
|
# Step 4: Test the code
|
||||||
self.progress = 50
|
self._update_progress(80, "validation", "Validating generated code...")
|
||||||
self.current_step = "Testing code"
|
|
||||||
self.message = "Running tests..."
|
|
||||||
await self._run_tests()
|
await self._run_tests()
|
||||||
|
|
||||||
# Step 5: Commit to git (skip in test env)
|
# Step 5: Commit generated artifacts locally for traceability
|
||||||
self.progress = 75
|
self._update_progress(90, "git", "Recording generated changes in git...")
|
||||||
self.current_step = "Committing to git"
|
await self._commit_to_git()
|
||||||
self.message = "Skipping git operations in test environment..."
|
|
||||||
|
|
||||||
# Step 6: Create PR (skip in test env)
|
|
||||||
self.progress = 90
|
|
||||||
self.current_step = "Creating PR"
|
|
||||||
self.message = "Skipping PR creation in test environment..."
|
|
||||||
|
|
||||||
# Step 7: Complete
|
# Step 7: Complete
|
||||||
self.progress = 100
|
self.status = "completed"
|
||||||
self.current_step = "Completed"
|
self._update_progress(100, "completed", "Software generation complete!")
|
||||||
self.message = "Software generation complete!"
|
self._append_log("Software generation complete!")
|
||||||
self.logs.append(f"[{datetime.utcnow().isoformat()}] Software generation complete!")
|
self.ui_manager.ui_data["changed_files"] = list(dict.fromkeys(self.changed_files))
|
||||||
|
|
||||||
# Log completion to database if available
|
# Log completion to database if available
|
||||||
if self.db_manager and self.history:
|
if self.db_manager and self.history:
|
||||||
|
self.db_manager.save_ui_snapshot(self.history.id, self.ui_manager.get_ui_data())
|
||||||
self.db_manager.log_project_complete(
|
self.db_manager.log_project_complete(
|
||||||
history_id=self.history.id,
|
history_id=self.history.id,
|
||||||
message="Software generation complete!"
|
message="Software generation complete!"
|
||||||
@@ -118,13 +547,18 @@ class AgentOrchestrator:
|
|||||||
"current_step": self.current_step,
|
"current_step": self.current_step,
|
||||||
"logs": self.logs,
|
"logs": self.logs,
|
||||||
"ui_data": self.ui_manager.ui_data,
|
"ui_data": self.ui_manager.ui_data,
|
||||||
"history_id": self.history.id if self.history else None
|
"history_id": self.history.id if self.history else None,
|
||||||
|
"project_root": str(self.project_root),
|
||||||
|
"changed_files": list(dict.fromkeys(self.changed_files)),
|
||||||
|
"repository": self.ui_manager.ui_data.get("repository"),
|
||||||
|
"related_issue": self.related_issue,
|
||||||
|
"pull_request": self.ui_manager.ui_data.get("pull_request"),
|
||||||
}
|
}
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.status = "error"
|
self.status = "error"
|
||||||
self.message = f"Error: {str(e)}"
|
self.message = f"Error: {str(e)}"
|
||||||
self.logs.append(f"[{datetime.utcnow().isoformat()}] Error: {str(e)}")
|
self._append_log(f"Error: {str(e)}")
|
||||||
|
|
||||||
# Log error to database if available
|
# Log error to database if available
|
||||||
if self.db_manager and self.history:
|
if self.db_manager and self.history:
|
||||||
@@ -141,68 +575,110 @@ class AgentOrchestrator:
|
|||||||
"logs": self.logs,
|
"logs": self.logs,
|
||||||
"error": str(e),
|
"error": str(e),
|
||||||
"ui_data": self.ui_manager.ui_data,
|
"ui_data": self.ui_manager.ui_data,
|
||||||
"history_id": self.history.id if self.history else None
|
"history_id": self.history.id if self.history else None,
|
||||||
|
"project_root": str(self.project_root),
|
||||||
|
"changed_files": list(dict.fromkeys(self.changed_files)),
|
||||||
|
"repository": self.ui_manager.ui_data.get("repository"),
|
||||||
|
"related_issue": self.related_issue,
|
||||||
|
"pull_request": self.ui_manager.ui_data.get("pull_request"),
|
||||||
}
|
}
|
||||||
|
|
||||||
async def _create_project_structure(self) -> None:
|
async def _create_project_structure(self) -> None:
|
||||||
"""Create initial project structure."""
|
"""Create initial project structure."""
|
||||||
project_dir = self.project_id
|
self.project_root.mkdir(parents=True, exist_ok=True)
|
||||||
|
for relative_path, content in self._template_files().items():
|
||||||
# Create .gitignore
|
if relative_path.startswith("main.py") or relative_path.startswith("tests/"):
|
||||||
gitignore_path = f"{project_dir}/.gitignore"
|
continue
|
||||||
try:
|
self._write_file(relative_path, content)
|
||||||
os.makedirs(project_dir, exist_ok=True)
|
self._append_log(f"Project structure created under {self.project_root}.")
|
||||||
with open(gitignore_path, "w") as f:
|
|
||||||
f.write("# Python\n__pycache__/\n*.pyc\n*.pyo\n*.pyd\n.Python\n*.env\n.venv/\nnode_modules/\n.env\nbuild/\ndist/\n.pytest_cache/\n.mypy_cache/\n.coverage\nhtmlcov/\n.idea/\n.vscode/\n*.swp\n*.swo\n*~\n.DS_Store\n.git\n")
|
|
||||||
except Exception as e:
|
|
||||||
self.logs.append(f"[{datetime.utcnow().isoformat()}] Failed to create .gitignore: {str(e)}")
|
|
||||||
|
|
||||||
# Create README.md
|
|
||||||
readme_path = f"{project_dir}/README.md"
|
|
||||||
try:
|
|
||||||
with open(readme_path, "w") as f:
|
|
||||||
f.write(f"# {self.project_name}\n\n{self.description}\n\n## Features\n")
|
|
||||||
for feature in self.features:
|
|
||||||
f.write(f"- {feature}\n")
|
|
||||||
f.write(f"\n## Tech Stack\n")
|
|
||||||
for tech in self.tech_stack:
|
|
||||||
f.write(f"- {tech}\n")
|
|
||||||
except Exception as e:
|
|
||||||
self.logs.append(f"[{datetime.utcnow().isoformat()}] Failed to create README.md: {str(e)}")
|
|
||||||
|
|
||||||
async def _generate_code(self) -> None:
|
async def _generate_code(self) -> None:
|
||||||
"""Generate code using Ollama."""
|
"""Generate code using Ollama."""
|
||||||
# This would call Ollama API to generate code
|
for relative_path, content in self._template_files().items():
|
||||||
# For now, create a placeholder file
|
if relative_path in {"main.py", "tests/test_app.py"}:
|
||||||
try:
|
self._write_file(relative_path, content)
|
||||||
main_py_path = f"{self.project_id}/main.py"
|
self._append_log("Application entrypoint and smoke test generated.")
|
||||||
os.makedirs(self.project_id, exist_ok=True)
|
|
||||||
with open(main_py_path, "w") as f:
|
|
||||||
f.write("# Generated by AI Software Factory\n")
|
|
||||||
f.write("print('Hello, World!')\n")
|
|
||||||
except Exception as e:
|
|
||||||
self.logs.append(f"[{datetime.utcnow().isoformat()}] Failed to create main.py: {str(e)}")
|
|
||||||
|
|
||||||
# Log code change to audit trail
|
|
||||||
if self.db_manager and self.history:
|
|
||||||
self.db_manager.log_code_change(
|
|
||||||
project_id=self.project_id,
|
|
||||||
change_type="CREATE",
|
|
||||||
file_path="main.py",
|
|
||||||
actor="agent",
|
|
||||||
actor_type="agent",
|
|
||||||
details="Generated main.py file"
|
|
||||||
)
|
|
||||||
|
|
||||||
async def _run_tests(self) -> None:
|
async def _run_tests(self) -> None:
|
||||||
"""Run tests for the generated code."""
|
"""Run tests for the generated code."""
|
||||||
# This would run pytest or other test framework
|
py_compile.compile(str(self.project_root / "main.py"), doraise=True)
|
||||||
# For now, simulate test success
|
py_compile.compile(str(self.project_root / "tests/test_app.py"), doraise=True)
|
||||||
pass
|
self._append_log("Generated Python files compiled successfully.")
|
||||||
|
|
||||||
async def _commit_to_git(self) -> None:
|
async def _commit_to_git(self) -> None:
|
||||||
"""Commit changes to git."""
|
"""Commit changes to git."""
|
||||||
pass # Skip git operations in test environment
|
unique_files = list(dict.fromkeys(self.changed_files))
|
||||||
|
if not unique_files:
|
||||||
|
return
|
||||||
|
if not self.git_manager.is_git_available():
|
||||||
|
self.ui_manager.ui_data.setdefault('git', {})['error'] = 'git executable is not available in PATH'
|
||||||
|
self._append_log('Git commit skipped: git executable is not available in PATH')
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
if not self.git_manager.has_repo():
|
||||||
|
self.git_manager.init_repo()
|
||||||
|
base_commit = self.git_manager.current_head_or_none()
|
||||||
|
self.git_manager.add_files(unique_files)
|
||||||
|
if not self.git_manager.get_status():
|
||||||
|
return
|
||||||
|
|
||||||
|
commit_message = f"AI generation for prompt: {self.project_name}"
|
||||||
|
commit_hash = self.git_manager.commit(commit_message)
|
||||||
|
commit_record = {
|
||||||
|
"hash": commit_hash,
|
||||||
|
"message": commit_message,
|
||||||
|
"files": unique_files,
|
||||||
|
"timestamp": datetime.utcnow().isoformat(),
|
||||||
|
"scope": "local",
|
||||||
|
"branch": self.branch_name,
|
||||||
|
}
|
||||||
|
remote_record = None
|
||||||
|
try:
|
||||||
|
remote_record = await self._push_remote_commit(commit_hash, commit_message, unique_files, base_commit)
|
||||||
|
except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as remote_exc:
|
||||||
|
self.ui_manager.ui_data.setdefault("git", {})["remote_error"] = str(remote_exc)
|
||||||
|
self._append_log(f"Remote git push skipped: {remote_exc}")
|
||||||
|
|
||||||
|
if remote_record:
|
||||||
|
commit_record["scope"] = "remote"
|
||||||
|
commit_record["commit_url"] = remote_record.get("commit_url")
|
||||||
|
commit_record["compare_url"] = remote_record.get("compare_url")
|
||||||
|
if remote_record.get('pull_request'):
|
||||||
|
commit_record['pull_request'] = remote_record['pull_request']
|
||||||
|
self.ui_manager.ui_data['pull_request'] = remote_record['pull_request']
|
||||||
|
self.ui_manager.ui_data.setdefault("git", {})["latest_commit"] = commit_record
|
||||||
|
self.ui_manager.ui_data.setdefault("git", {})["commits"] = [commit_record]
|
||||||
|
self._append_log(f"Recorded git commit {commit_hash[:12]} for generated files.")
|
||||||
|
if self.db_manager:
|
||||||
|
self.db_manager.log_commit(
|
||||||
|
project_id=self.project_id,
|
||||||
|
commit_message=commit_message,
|
||||||
|
actor="orchestrator",
|
||||||
|
actor_type="agent",
|
||||||
|
history_id=self.history.id if self.history else None,
|
||||||
|
prompt_id=self.prompt_audit.id if self.prompt_audit else None,
|
||||||
|
commit_hash=commit_hash,
|
||||||
|
changed_files=unique_files,
|
||||||
|
branch=self.branch_name,
|
||||||
|
commit_url=remote_record.get("commit_url") if remote_record else None,
|
||||||
|
compare_url=remote_record.get("compare_url") if remote_record else None,
|
||||||
|
remote_status=remote_record.get("status") if remote_record else "local-only",
|
||||||
|
related_issue=self.related_issue,
|
||||||
|
)
|
||||||
|
if self.related_issue:
|
||||||
|
self.db_manager.log_issue_work(
|
||||||
|
project_id=self.project_id,
|
||||||
|
history_id=self.history.id if self.history else None,
|
||||||
|
prompt_id=self.prompt_audit.id if self.prompt_audit else None,
|
||||||
|
issue=self.related_issue,
|
||||||
|
actor='orchestrator',
|
||||||
|
commit_hash=commit_hash,
|
||||||
|
commit_url=remote_record.get('commit_url') if remote_record else None,
|
||||||
|
)
|
||||||
|
except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as exc:
|
||||||
|
self.ui_manager.ui_data.setdefault("git", {})["error"] = str(exc)
|
||||||
|
self._append_log(f"Git commit skipped: {exc}")
|
||||||
|
|
||||||
async def _create_pr(self) -> None:
|
async def _create_pr(self) -> None:
|
||||||
"""Create pull request."""
|
"""Create pull request."""
|
||||||
|
|||||||
127
ai_software_factory/agents/prompt_workflow.py
Normal file
127
ai_software_factory/agents/prompt_workflow.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
"""Helpers for prompt-level repository workflows such as undoing a prompt."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ..config import settings
|
||||||
|
from .database_manager import DatabaseManager
|
||||||
|
from .git_manager import GitManager
|
||||||
|
from .gitea import GiteaAPI
|
||||||
|
except ImportError:
|
||||||
|
from config import settings
|
||||||
|
from agents.database_manager import DatabaseManager
|
||||||
|
from agents.git_manager import GitManager
|
||||||
|
from agents.gitea import GiteaAPI
|
||||||
|
|
||||||
|
|
||||||
|
class PromptWorkflowManager:
|
||||||
|
"""Coordinate prompt-level repository actions against git and Gitea."""
|
||||||
|
|
||||||
|
def __init__(self, db):
|
||||||
|
self.db_manager = DatabaseManager(db)
|
||||||
|
self.gitea_api = GiteaAPI(
|
||||||
|
token=settings.GITEA_TOKEN,
|
||||||
|
base_url=settings.GITEA_URL,
|
||||||
|
owner=settings.GITEA_OWNER,
|
||||||
|
repo=settings.GITEA_REPO or '',
|
||||||
|
)
|
||||||
|
|
||||||
|
async def undo_prompt(self, project_id: str, prompt_id: int) -> dict:
|
||||||
|
"""Revert the commit associated with a prompt and push the revert to the PR branch."""
|
||||||
|
history = self.db_manager.get_project_by_id(project_id)
|
||||||
|
if history is None:
|
||||||
|
return {'status': 'error', 'message': 'Project not found'}
|
||||||
|
|
||||||
|
correlations = self.db_manager.get_prompt_change_correlations(project_id=project_id, limit=500)
|
||||||
|
correlation = next((item for item in correlations if item.get('prompt_id') == prompt_id), None)
|
||||||
|
if correlation is None:
|
||||||
|
return {'status': 'error', 'message': 'Prompt not found for project'}
|
||||||
|
if correlation.get('revert'):
|
||||||
|
return {'status': 'ignored', 'message': 'Prompt has already been reverted', 'revert': correlation['revert']}
|
||||||
|
|
||||||
|
original_commit = next(
|
||||||
|
(commit for commit in correlation.get('commits', []) if commit.get('remote_status') != 'reverted' and commit.get('commit_hash')),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
if original_commit is None:
|
||||||
|
return {'status': 'error', 'message': 'No reversible commit was recorded for this prompt'}
|
||||||
|
|
||||||
|
branch = original_commit.get('branch') or f'ai/{project_id}'
|
||||||
|
project_root = settings.projects_root / project_id
|
||||||
|
git_manager = GitManager(project_id, project_dir=str(project_root))
|
||||||
|
if not git_manager.has_repo():
|
||||||
|
return {'status': 'error', 'message': 'Local project repository is not available for undo'}
|
||||||
|
|
||||||
|
try:
|
||||||
|
git_manager.checkout_branch(branch)
|
||||||
|
previous_head = git_manager.current_head_or_none()
|
||||||
|
revert_commit_hash = git_manager.revert_commit(original_commit['commit_hash'])
|
||||||
|
except (subprocess.CalledProcessError, FileNotFoundError) as exc:
|
||||||
|
return {'status': 'error', 'message': f'Unable to revert prompt commit: {exc}'}
|
||||||
|
|
||||||
|
repository = self.db_manager.get_project_audit_data(project_id).get('repository') or {}
|
||||||
|
commit_url = None
|
||||||
|
compare_url = None
|
||||||
|
if (
|
||||||
|
repository.get('mode') == 'project'
|
||||||
|
and repository.get('status') in {'created', 'exists', 'ready'}
|
||||||
|
and settings.gitea_token
|
||||||
|
and repository.get('owner')
|
||||||
|
and repository.get('name')
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
user_info = await self.gitea_api.get_current_user()
|
||||||
|
username = user_info.get('login') if isinstance(user_info, dict) else None
|
||||||
|
if username and not user_info.get('error'):
|
||||||
|
remote_url = repository.get('clone_url') or self.gitea_api.build_repo_git_url(repository.get('owner'), repository.get('name'))
|
||||||
|
if remote_url:
|
||||||
|
git_manager.push_with_credentials(
|
||||||
|
remote_url=remote_url,
|
||||||
|
username=username,
|
||||||
|
password=settings.gitea_token,
|
||||||
|
branch=branch,
|
||||||
|
)
|
||||||
|
commit_url = self.gitea_api.build_commit_url(revert_commit_hash, repository.get('owner'), repository.get('name'))
|
||||||
|
if previous_head:
|
||||||
|
compare_url = self.gitea_api.build_compare_url(previous_head, revert_commit_hash, repository.get('owner'), repository.get('name'))
|
||||||
|
except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.db_manager.log_commit(
|
||||||
|
project_id=project_id,
|
||||||
|
commit_message=f'Revert prompt {prompt_id}',
|
||||||
|
actor='dashboard',
|
||||||
|
actor_type='user',
|
||||||
|
history_id=history.id,
|
||||||
|
prompt_id=prompt_id,
|
||||||
|
commit_hash=revert_commit_hash,
|
||||||
|
changed_files=original_commit.get('changed_files', []),
|
||||||
|
branch=branch,
|
||||||
|
commit_url=commit_url,
|
||||||
|
compare_url=compare_url,
|
||||||
|
remote_status='reverted',
|
||||||
|
)
|
||||||
|
self.db_manager.log_prompt_revert(
|
||||||
|
project_id=project_id,
|
||||||
|
prompt_id=prompt_id,
|
||||||
|
reverted_commit_hash=original_commit['commit_hash'],
|
||||||
|
revert_commit_hash=revert_commit_hash,
|
||||||
|
actor='dashboard',
|
||||||
|
commit_url=commit_url,
|
||||||
|
)
|
||||||
|
self.db_manager.log_system_event(
|
||||||
|
component='git',
|
||||||
|
level='INFO',
|
||||||
|
message=f'Reverted prompt {prompt_id} for project {project_id}',
|
||||||
|
)
|
||||||
|
return {
|
||||||
|
'status': 'success',
|
||||||
|
'project_id': project_id,
|
||||||
|
'prompt_id': prompt_id,
|
||||||
|
'reverted_commit_hash': original_commit['commit_hash'],
|
||||||
|
'revert_commit_hash': revert_commit_hash,
|
||||||
|
'commit_url': commit_url,
|
||||||
|
'compare_url': compare_url,
|
||||||
|
}
|
||||||
280
ai_software_factory/agents/request_interpreter.py
Normal file
280
ai_software_factory/agents/request_interpreter.py
Normal file
@@ -0,0 +1,280 @@
|
|||||||
|
"""Interpret free-form software requests into structured generation input."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ..config import settings
|
||||||
|
except ImportError:
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
|
||||||
|
class RequestInterpreter:
|
||||||
|
"""Use Ollama to turn free-form text into a structured software request."""
|
||||||
|
|
||||||
|
def __init__(self, ollama_url: str | None = None, model: str | None = None):
|
||||||
|
self.ollama_url = (ollama_url or settings.ollama_url).rstrip('/')
|
||||||
|
self.model = model or settings.OLLAMA_MODEL
|
||||||
|
|
||||||
|
async def interpret(self, prompt_text: str, context: dict | None = None) -> dict:
|
||||||
|
"""Interpret free-form text into the request shape expected by the orchestrator."""
|
||||||
|
interpreted, _trace = await self.interpret_with_trace(prompt_text, context=context)
|
||||||
|
return interpreted
|
||||||
|
|
||||||
|
async def interpret_with_trace(self, prompt_text: str, context: dict | None = None) -> tuple[dict, dict]:
    """Interpret free-form text into the request shape expected by the orchestrator.

    Returns a tuple of (interpreted_request, trace).  The trace dict records the
    prompts sent to the LLM, the raw response, routing metadata, and whether the
    heuristic fallback was used.

    Raises:
        ValueError: if prompt_text is empty or whitespace-only.
    """
    normalized = prompt_text.strip()
    if not normalized:
        raise ValueError('Prompt text cannot be empty')

    # Compact the caller-provided context (projects, chat history) for the LLM.
    compact_context = self._build_compact_context(context or {})

    system_prompt = (
        'You route Telegram software prompts. '
        'Decide whether the prompt starts a new project or continues an existing tracked project. '
        'When continuing, identify the best matching project_id from the provided context and the issue number if one is mentioned or implied by recent chat history. '
        'Return only JSON with keys request and routing. '
        'request must contain name, description, features, tech_stack. '
        'routing must contain intent, project_id, project_name, issue_number, confidence, and reasoning_summary. '
        'Use the provided project catalog and recent chat history. '
        'If the user says things like also, continue, work on this, that issue, or follow-up wording, prefer continuation of the most relevant recent project. '
        'If the user explicitly asks for a new project, set intent to new_project.'
    )
    user_prompt = normalized
    if compact_context:
        # Prepend the serialized context so the model can resolve references
        # like "that issue" or "this project".
        user_prompt = (
            f"Conversation context:\n{json.dumps(compact_context, indent=2)}\n\n"
            f"User prompt:\n{normalized}"
        )

    try:
        # Imported lazily so the module still loads when aiohttp is absent.
        import aiohttp

        async with aiohttp.ClientSession() as session:
            async with session.post(
                f'{self.ollama_url}/api/chat',
                json={
                    'model': self.model,
                    'stream': False,
                    # Ask Ollama to constrain the reply to valid JSON.
                    'format': 'json',
                    'messages': [
                        {
                            'role': 'system',
                            'content': system_prompt,
                        },
                        {'role': 'user', 'content': user_prompt},
                    ],
                },
            ) as resp:
                payload = await resp.json()
                if 200 <= resp.status < 300:
                    content = payload.get('message', {}).get('content', '')
                    if content:
                        parsed = json.loads(content)
                        interpreted = self._normalize_interpreted_request(parsed, normalized)
                        routing = self._normalize_routing(parsed.get('routing'), interpreted, compact_context)
                        return interpreted, {
                            'stage': 'request_interpretation',
                            'provider': 'ollama',
                            'model': self.model,
                            'system_prompt': system_prompt,
                            'user_prompt': user_prompt,
                            'assistant_response': content,
                            'raw_response': payload,
                            'routing': routing,
                            'context_excerpt': compact_context,
                            'fallback_used': False,
                        }
    except Exception:
        # Best-effort: any LLM/transport/JSON failure falls through to the
        # heuristic path below instead of failing the whole request.
        pass

    # LLM unavailable or returned an unusable reply -- use heuristic routing.
    interpreted, routing = self._heuristic_fallback(normalized, compact_context)
    return interpreted, {
        'stage': 'request_interpretation',
        'provider': 'heuristic',
        'model': self.model,
        'system_prompt': system_prompt,
        'user_prompt': user_prompt,
        'assistant_response': json.dumps({'request': interpreted, 'routing': routing}),
        'raw_response': {'fallback': 'heuristic'},
        'routing': routing,
        'context_excerpt': compact_context,
        'fallback_used': True,
    }
|
||||||
|
|
||||||
|
def _normalize_interpreted_request(self, interpreted: dict, original_prompt: str) -> dict:
    """Normalize LLM output into the required request shape.

    Args:
        interpreted: Parsed JSON from the LLM; fields may be nested under a
            'request' key or emitted at the top level.
        original_prompt: The user's normalized prompt, used for fallbacks.

    Returns:
        A dict with keys name, description, features, tech_stack.
    """
    # request_payload is always a dict after this line: either the nested
    # 'request' object or the top-level payload itself.  (The original also
    # pre-computed `name` from `interpreted`, but that value was always
    # overwritten below -- dead code removed.)
    request_payload = interpreted.get('request') if isinstance(interpreted.get('request'), dict) else interpreted
    name = str(request_payload.get('name') or '').strip() or self._derive_name(original_prompt)
    description = str(request_payload.get('description') or '').strip() or original_prompt[:255]
    features = self._normalize_list(request_payload.get('features'))
    tech_stack = self._normalize_list(request_payload.get('tech_stack'))
    if not features:
        # Guarantee at least one feature so downstream planning has a seed.
        features = ['core workflow based on free-form request']
    return {
        'name': name[:255],
        'description': description[:255],
        'features': features,
        'tech_stack': tech_stack,
    }
|
||||||
|
|
||||||
|
def _build_compact_context(self, context: dict) -> dict:
|
||||||
|
"""Reduce interpreter context to the fields that help routing."""
|
||||||
|
projects = []
|
||||||
|
for project in context.get('projects', [])[:10]:
|
||||||
|
issues = []
|
||||||
|
for issue in project.get('open_issues', [])[:5]:
|
||||||
|
issues.append({'number': issue.get('number'), 'title': issue.get('title'), 'state': issue.get('state')})
|
||||||
|
projects.append(
|
||||||
|
{
|
||||||
|
'project_id': project.get('project_id'),
|
||||||
|
'name': project.get('name'),
|
||||||
|
'description': project.get('description'),
|
||||||
|
'repository': project.get('repository'),
|
||||||
|
'open_pull_request': bool(project.get('open_pull_request')),
|
||||||
|
'open_issues': issues,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return {
|
||||||
|
'chat_id': context.get('chat_id'),
|
||||||
|
'recent_chat_history': context.get('recent_chat_history', [])[:8],
|
||||||
|
'projects': projects,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _normalize_routing(self, routing: dict | None, interpreted: dict, context: dict) -> dict:
|
||||||
|
"""Normalize routing metadata returned by the LLM."""
|
||||||
|
routing = routing or {}
|
||||||
|
project_id = routing.get('project_id')
|
||||||
|
project_name = routing.get('project_name')
|
||||||
|
issue_number = routing.get('issue_number')
|
||||||
|
if issue_number in ('', None):
|
||||||
|
issue_number = None
|
||||||
|
elif isinstance(issue_number, str) and issue_number.isdigit():
|
||||||
|
issue_number = int(issue_number)
|
||||||
|
matched_project = None
|
||||||
|
for project in context.get('projects', []):
|
||||||
|
if project_id and project.get('project_id') == project_id:
|
||||||
|
matched_project = project
|
||||||
|
break
|
||||||
|
if project_name and project.get('name') == project_name:
|
||||||
|
matched_project = project
|
||||||
|
break
|
||||||
|
intent = str(routing.get('intent') or '').strip() or ('continue_project' if matched_project else 'new_project')
|
||||||
|
return {
|
||||||
|
'intent': intent,
|
||||||
|
'project_id': matched_project.get('project_id') if matched_project else project_id,
|
||||||
|
'project_name': matched_project.get('name') if matched_project else (project_name or interpreted.get('name')),
|
||||||
|
'issue_number': issue_number,
|
||||||
|
'confidence': routing.get('confidence') or ('medium' if matched_project else 'low'),
|
||||||
|
'reasoning_summary': routing.get('reasoning_summary') or ('Matched prior project context' if matched_project else 'No strong prior project match found'),
|
||||||
|
}
|
||||||
|
|
||||||
|
def _normalize_list(self, value) -> list[str]:
|
||||||
|
if isinstance(value, list):
|
||||||
|
return [str(item).strip() for item in value if str(item).strip()]
|
||||||
|
if isinstance(value, str) and value.strip():
|
||||||
|
return [item.strip() for item in value.split(',') if item.strip()]
|
||||||
|
return []
|
||||||
|
|
||||||
|
def _derive_name(self, prompt_text: str) -> str:
    """Derive a stable project name when the LLM does not provide one.

    Strategy, in order: a quoted phrase on the first line; a noun phrase
    following a build/create-style verb; otherwise the first few non-stopword
    tokens.  Falls back to 'Generated Project'.

    Args:
        prompt_text: Free-form user prompt; may be empty.
    """
    # Guard: ''.splitlines() is empty, which previously raised IndexError.
    lines = prompt_text.splitlines()
    first_line = lines[0].strip() if lines else ''
    if not first_line:
        return 'Generated Project'

    # 1) Prefer an explicitly quoted name.
    quoted = re.search(r'["\']([^"\']{3,80})["\']', first_line)
    if quoted:
        return self._humanize_name(quoted.group(1))

    # 2) Look for "<verb> ... <product noun>" phrasing.
    noun_phrase = re.search(
        r'(?:build|create|start|make|develop|generate|design|need|want)\s+'
        r'(?:me\s+|us\s+|an?\s+|the\s+|new\s+|internal\s+|simple\s+|lightweight\s+|modern\s+|web\s+|mobile\s+)*'
        r'([a-z0-9][a-z0-9\s-]{2,80}?(?:portal|dashboard|app|application|service|tool|system|platform|api|bot|assistant|website|site|workspace|tracker|manager))\b',
        first_line,
        flags=re.IGNORECASE,
    )
    if noun_phrase:
        return self._humanize_name(noun_phrase.group(1))

    # 3) Fall back to the first meaningful words of the prompt.
    cleaned = re.sub(r'[^A-Za-z0-9 ]+', ' ', first_line)
    stopwords = {
        'build', 'create', 'start', 'make', 'develop', 'generate', 'design', 'need', 'want', 'please', 'for', 'our', 'with', 'that', 'this',
        'new', 'internal', 'simple', 'modern', 'web', 'mobile', 'app', 'application', 'tool', 'system',
    }
    tokens = [word for word in cleaned.split() if word and word.lower() not in stopwords]
    if tokens:
        return self._humanize_name(' '.join(tokens[:4]))
    return 'Generated Project'
|
||||||
|
|
||||||
|
def _humanize_name(self, raw_name: str) -> str:
|
||||||
|
"""Normalize a candidate project name into a readable title."""
|
||||||
|
cleaned = re.sub(r'[^A-Za-z0-9\s-]+', ' ', raw_name).strip(' -')
|
||||||
|
cleaned = re.sub(r'\s+', ' ', cleaned)
|
||||||
|
special_upper = {'api', 'crm', 'erp', 'cms', 'hr', 'it', 'ui', 'qa'}
|
||||||
|
words = []
|
||||||
|
for word in cleaned.split()[:6]:
|
||||||
|
lowered = word.lower()
|
||||||
|
words.append(lowered.upper() if lowered in special_upper else lowered.capitalize())
|
||||||
|
return ' '.join(words) or 'Generated Project'
|
||||||
|
|
||||||
|
def _heuristic_fallback(self, prompt_text: str, context: dict | None = None) -> tuple[dict, dict]:
    """Fallback request extraction when Ollama is unavailable."""
    lowered = prompt_text.lower()

    # Keyword scan for well-known technologies mentioned in the prompt.
    known_tech = ('python', 'fastapi', 'django', 'flask', 'postgresql', 'sqlite', 'react', 'vue', 'nicegui', 'docker')
    tech_stack = []
    for keyword in known_tech:
        if keyword in lowered:
            tech_stack.append(keyword)

    # Split on newlines/periods; the leading sentences double as features.
    sentences = []
    for chunk in re.split(r'[\n\.]+', prompt_text):
        chunk = chunk.strip()
        if chunk:
            sentences.append(chunk)

    interpreted = {
        'name': self._derive_name(prompt_text),
        'description': sentences[0][:255] if sentences else prompt_text[:255],
        'features': sentences[:3] or ['Implement the user request from free-form text'],
        'tech_stack': tech_stack,
    }
    routing = self._heuristic_routing(prompt_text, context or {})
    if routing.get('project_name'):
        # Prefer the routed project name so follow-ups keep a stable title.
        interpreted['name'] = routing['project_name']
    return interpreted, routing
|
||||||
|
|
||||||
|
def _heuristic_routing(self, prompt_text: str, context: dict) -> dict:
    """Best-effort routing when the LLM is unavailable."""
    lowered = prompt_text.lower()
    wants_new = any(
        phrase in lowered
        for phrase in ('new project', 'start a new project', 'create a new project', 'build a new app')
    )
    referenced_issue = self._extract_issue_number(prompt_text)

    history = context.get('recent_chat_history', [])
    projects = context.get('projects', [])
    last_project_id = history[0].get('project_id') if history else None
    last_issue = ((history[0].get('related_issue') or {}).get('number') if history else None)

    # First pass: an explicit project or repository name in the prompt wins.
    matched = None
    for candidate in projects:
        candidate_name = (candidate.get('name') or '').lower()
        repo_name = ((candidate.get('repository') or {}).get('name') or '').lower()
        if (candidate_name and candidate_name in lowered) or (repo_name and repo_name in lowered):
            matched = candidate
            break

    # Second pass: follow-up phrasing refers back to the most recent project.
    if matched is None and not wants_new and last_project_id:
        follow_up = ('also', 'continue', 'for this project', 'for that project', 'work on this', 'work on that', 'fix that', 'add this')
        if any(token in lowered for token in follow_up):
            matched = next((candidate for candidate in projects if candidate.get('project_id') == last_project_id), None)

    # Resolve "that issue"-style references against the last mentioned issue.
    issue_number = referenced_issue
    if issue_number is None and last_issue is not None and any(token in lowered for token in ('that issue', 'this issue', 'the issue')):
        issue_number = last_issue

    intent = 'new_project' if wants_new or matched is None else 'continue_project'
    return {
        'intent': intent,
        'project_id': matched.get('project_id') if matched else None,
        'project_name': matched.get('name') if matched else self._derive_name(prompt_text),
        'issue_number': issue_number,
        'confidence': 'medium' if matched or wants_new else 'low',
        'reasoning_summary': 'Heuristic routing from chat history and project names.',
    }
|
||||||
|
|
||||||
|
def _extract_issue_number(self, prompt_text: str) -> int | None:
|
||||||
|
match = re.search(r'(?:#|issue\s+)(\d+)', prompt_text, flags=re.IGNORECASE)
|
||||||
|
return int(match.group(1)) if match else None
|
||||||
@@ -1,8 +1,6 @@
|
|||||||
"""Telegram bot integration for n8n webhook."""
|
"""Telegram bot integration for n8n webhook."""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import json
|
|
||||||
import re
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
@@ -13,6 +11,59 @@ class TelegramHandler:
|
|||||||
self.webhook_url = webhook_url
|
self.webhook_url = webhook_url
|
||||||
self.api_url = "https://api.telegram.org/bot"
|
self.api_url = "https://api.telegram.org/bot"
|
||||||
|
|
||||||
|
def build_prompt_guide_message(self, backend_url: str | None = None) -> str:
|
||||||
|
"""Build a Telegram message explaining the expected prompt format."""
|
||||||
|
lines = [
|
||||||
|
"AI Software Factory is listening in this chat.",
|
||||||
|
"",
|
||||||
|
"You can send free-form software requests in normal language.",
|
||||||
|
"",
|
||||||
|
"Example:",
|
||||||
|
"Build an internal inventory portal for our warehouse team.",
|
||||||
|
"It should support role-based login, stock dashboards, and purchase orders.",
|
||||||
|
"Prefer FastAPI, PostgreSQL, and a simple web UI.",
|
||||||
|
"",
|
||||||
|
"The backend will interpret the request and turn it into a structured project plan.",
|
||||||
|
]
|
||||||
|
if backend_url:
|
||||||
|
lines.extend(["", f"Backend target: {backend_url}"])
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
async def send_message(self, bot_token: str, chat_id: str | int, text: str) -> dict:
    """Send a direct Telegram message using the configured bot.

    Returns a structured {'status', 'message', ...} dict instead of raising,
    so callers can surface configuration and API errors uniformly.
    """
    # Validate configuration up front so callers get actionable errors.
    if not bot_token:
        return {"status": "error", "message": "Telegram bot token is not configured"}
    if chat_id in (None, ""):
        return {"status": "error", "message": "Telegram chat id is not configured"}

    endpoint = f"{self.api_url}{bot_token}/sendMessage"

    try:
        # Imported lazily so the module loads even when aiohttp is absent.
        import aiohttp

        async with aiohttp.ClientSession() as session:
            request_body = {
                "chat_id": str(chat_id),
                "text": text,
            }
            async with session.post(endpoint, json=request_body) as resp:
                payload = await resp.json()
                if 200 <= resp.status < 300 and payload.get("ok"):
                    return {
                        "status": "success",
                        "message": "Telegram prompt guide sent successfully",
                        "payload": payload,
                    }
                description = payload.get("description") or payload.get("message") or str(payload)
                return {
                    "status": "error",
                    "message": f"Telegram API returned {resp.status}: {description}",
                    "payload": payload,
                }
    except Exception as exc:
        # Network/parsing failures are reported as structured errors.
        return {"status": "error", "message": str(exc)}
|
||||||
|
|
||||||
async def handle_message(self, message_data: dict) -> dict:
|
async def handle_message(self, message_data: dict) -> dict:
|
||||||
"""Handle incoming Telegram message."""
|
"""Handle incoming Telegram message."""
|
||||||
text = message_data.get("text", "")
|
text = message_data.get("text", "")
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
"""UI manager for web dashboard with audit trail display."""
|
"""UI manager for web dashboard with audit trail display."""
|
||||||
|
|
||||||
|
import html
|
||||||
import json
|
import json
|
||||||
from typing import Optional, List
|
from typing import Optional, List
|
||||||
|
|
||||||
@@ -50,14 +51,7 @@ class UIManager:
|
|||||||
"""Escape HTML special characters for safe display."""
|
"""Escape HTML special characters for safe display."""
|
||||||
if text is None:
|
if text is None:
|
||||||
return ""
|
return ""
|
||||||
safe_chars = {
|
return html.escape(str(text), quote=True)
|
||||||
'&': '&',
|
|
||||||
'<': '<',
|
|
||||||
'>': '>',
|
|
||||||
'"': '"',
|
|
||||||
"'": '''
|
|
||||||
}
|
|
||||||
return ''.join(safe_chars.get(c, c) for c in str(text))
|
|
||||||
|
|
||||||
def render_dashboard(self, audit_trail: Optional[List[dict]] = None,
|
def render_dashboard(self, audit_trail: Optional[List[dict]] = None,
|
||||||
actions: Optional[List[dict]] = None,
|
actions: Optional[List[dict]] = None,
|
||||||
|
|||||||
37
ai_software_factory/alembic.ini
Normal file
37
ai_software_factory/alembic.ini
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
[alembic]
|
||||||
|
script_location = alembic
|
||||||
|
prepend_sys_path = .
|
||||||
|
path_separator = os
|
||||||
|
sqlalchemy.url = sqlite:////tmp/ai_software_factory_test.db
|
||||||
|
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARN
|
||||||
|
handlers = console
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARN
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers = console
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
50
ai_software_factory/alembic/env.py
Normal file
50
ai_software_factory/alembic/env.py
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
"""Alembic environment for AI Software Factory."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from logging.config import fileConfig
|
||||||
|
|
||||||
|
from alembic import context
|
||||||
|
from sqlalchemy import engine_from_config, pool
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ai_software_factory.models import Base
|
||||||
|
except ImportError:
|
||||||
|
from models import Base
|
||||||
|
|
||||||
|
config = context.config
|
||||||
|
|
||||||
|
if config.config_file_name is not None:
|
||||||
|
fileConfig(config.config_file_name)
|
||||||
|
|
||||||
|
target_metadata = Base.metadata
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_offline() -> None:
    """Run migrations in offline mode."""
    # Offline mode emits SQL against the configured URL without connecting.
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        compare_type=True,
    )
    with context.begin_transaction():
        context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_online() -> None:
    """Run migrations in online mode."""
    # Build a throwaway engine from the [alembic] config section; NullPool
    # avoids holding connections beyond the migration run.
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata, compare_type=True)
        with context.begin_transaction():
            context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
# Entry point: alembic decides offline vs. online mode before loading env.py.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||||
17
ai_software_factory/alembic/script.py.mako
Normal file
17
ai_software_factory/alembic/script.py.mako
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
"""${message}"""
|
||||||
|
|
||||||
|
revision = ${repr(up_revision)}
|
||||||
|
down_revision = ${repr(down_revision)}
|
||||||
|
branch_labels = ${repr(branch_labels)}
|
||||||
|
depends_on = ${repr(depends_on)}
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
${upgrades if upgrades else "pass"}
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
${downgrades if downgrades else "pass"}
|
||||||
@@ -0,0 +1,164 @@
|
|||||||
|
"""initial schema
|
||||||
|
|
||||||
|
Revision ID: 20260410_01
|
||||||
|
Revises:
|
||||||
|
Create Date: 2026-04-10 00:00:00
|
||||||
|
"""
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
revision = "20260410_01"
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the initial schema: project history plus its dependent tables."""
    # Standalone log of per-agent actions (no FK to project_history).
    op.create_table(
        "agent_actions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("agent_name", sa.String(length=100), nullable=False),
        sa.Column("action_type", sa.String(length=100), nullable=False),
        sa.Column("success", sa.Boolean(), nullable=True),
        sa.Column("message", sa.String(length=500), nullable=True),
        sa.Column("timestamp", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # Free-form audit events; project_id is a soft reference (no FK).
    op.create_table(
        "audit_trail",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("component", sa.String(length=50), nullable=True),
        sa.Column("log_level", sa.String(length=50), nullable=True),
        sa.Column("message", sa.String(length=500), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("project_id", sa.String(length=255), nullable=True),
        sa.Column("action", sa.String(length=100), nullable=True),
        sa.Column("actor", sa.String(length=100), nullable=True),
        sa.Column("action_type", sa.String(length=50), nullable=True),
        sa.Column("details", sa.Text(), nullable=True),
        sa.Column("metadata_json", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # Parent table: one row per tracked project run; referenced by the
    # history_id FKs of the tables created below.
    op.create_table(
        "project_history",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("project_id", sa.String(length=255), nullable=False),
        sa.Column("project_name", sa.String(length=255), nullable=True),
        sa.Column("features", sa.Text(), nullable=True),
        sa.Column("description", sa.String(length=255), nullable=True),
        sa.Column("status", sa.String(length=50), nullable=True),
        sa.Column("progress", sa.Integer(), nullable=True),
        sa.Column("message", sa.String(length=500), nullable=True),
        sa.Column("current_step", sa.String(length=255), nullable=True),
        sa.Column("total_steps", sa.Integer(), nullable=True),
        sa.Column("current_step_description", sa.String(length=1024), nullable=True),
        sa.Column("current_step_details", sa.Text(), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("started_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("completed_at", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # System-wide log entries, including request metadata.
    op.create_table(
        "system_logs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("component", sa.String(length=50), nullable=False),
        sa.Column("log_level", sa.String(length=50), nullable=True),
        sa.Column("log_message", sa.String(length=500), nullable=False),
        sa.Column("user_agent", sa.String(length=255), nullable=True),
        sa.Column("ip_address", sa.String(length=45), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # Per-project log lines attached to a history row.
    op.create_table(
        "project_logs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("history_id", sa.Integer(), nullable=False),
        sa.Column("log_level", sa.String(length=50), nullable=True),
        sa.Column("log_message", sa.String(length=500), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    # Links a prompt audit entry to the code-change audit entry it produced.
    op.create_table(
        "prompt_code_links",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("history_id", sa.Integer(), nullable=False),
        sa.Column("project_id", sa.String(length=255), nullable=False),
        sa.Column("prompt_audit_id", sa.Integer(), nullable=False),
        sa.Column("code_change_audit_id", sa.Integer(), nullable=False),
        sa.Column("file_path", sa.String(length=500), nullable=True),
        sa.Column("change_type", sa.String(length=50), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    # Snapshot of PR metadata captured during a run.
    op.create_table(
        "pull_request_data",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("history_id", sa.Integer(), nullable=False),
        sa.Column("pr_number", sa.Integer(), nullable=False),
        sa.Column("pr_title", sa.String(length=500), nullable=False),
        sa.Column("pr_body", sa.Text(), nullable=True),
        sa.Column("pr_state", sa.String(length=50), nullable=False),
        sa.Column("pr_url", sa.String(length=500), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    # Full pull-request records, including merge state.
    op.create_table(
        "pull_requests",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("history_id", sa.Integer(), nullable=False),
        sa.Column("pr_number", sa.Integer(), nullable=False),
        sa.Column("pr_title", sa.String(length=500), nullable=False),
        sa.Column("pr_body", sa.Text(), nullable=True),
        sa.Column("base", sa.String(length=255), nullable=False),
        sa.Column("user", sa.String(length=255), nullable=False),
        sa.Column("pr_url", sa.String(length=500), nullable=False),
        sa.Column("merged", sa.Boolean(), nullable=True),
        sa.Column("merged_at", sa.DateTime(), nullable=True),
        sa.Column("pr_state", sa.String(length=50), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    # JSON snapshots of the dashboard UI state per history row.
    op.create_table(
        "ui_snapshots",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("history_id", sa.Integer(), nullable=False),
        sa.Column("snapshot_data", sa.JSON(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    # User/agent actions; history_id is nullable for actions outside a run.
    op.create_table(
        "user_actions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("history_id", sa.Integer(), nullable=True),
        sa.Column("user_id", sa.String(length=100), nullable=True),
        sa.Column("action_type", sa.String(length=100), nullable=True),
        sa.Column("actor_type", sa.String(length=50), nullable=True),
        sa.Column("actor_name", sa.String(length=100), nullable=True),
        sa.Column("action_description", sa.String(length=500), nullable=True),
        sa.Column("action_data", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the initial schema.

    Child tables holding FKs to project_history are dropped first so the
    parent can be removed without constraint violations.
    """
    op.drop_table("user_actions")
    op.drop_table("ui_snapshots")
    op.drop_table("pull_requests")
    op.drop_table("pull_request_data")
    op.drop_table("prompt_code_links")
    op.drop_table("project_logs")
    op.drop_table("system_logs")
    # Parent table last among the FK-linked group.
    op.drop_table("project_history")
    op.drop_table("audit_trail")
    op.drop_table("agent_actions")
|
||||||
@@ -4,12 +4,18 @@ import os
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from pydantic import Field
|
from pydantic import Field
|
||||||
from pydantic_settings import BaseSettings
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
|
|
||||||
class Settings(BaseSettings):
|
class Settings(BaseSettings):
|
||||||
"""Application settings loaded from environment variables."""
|
"""Application settings loaded from environment variables."""
|
||||||
|
|
||||||
|
model_config = SettingsConfigDict(
|
||||||
|
env_file=".env",
|
||||||
|
env_file_encoding="utf-8",
|
||||||
|
extra="ignore",
|
||||||
|
)
|
||||||
|
|
||||||
# Server settings
|
# Server settings
|
||||||
HOST: str = "0.0.0.0"
|
HOST: str = "0.0.0.0"
|
||||||
PORT: int = 8000
|
PORT: int = 8000
|
||||||
@@ -23,14 +29,20 @@ class Settings(BaseSettings):
|
|||||||
GITEA_URL: str = "https://gitea.yourserver.com"
|
GITEA_URL: str = "https://gitea.yourserver.com"
|
||||||
GITEA_TOKEN: str = ""
|
GITEA_TOKEN: str = ""
|
||||||
GITEA_OWNER: str = "ai-software-factory"
|
GITEA_OWNER: str = "ai-software-factory"
|
||||||
GITEA_REPO: str = "ai-software-factory"
|
GITEA_REPO: str = ""
|
||||||
|
|
||||||
# n8n settings
|
# n8n settings
|
||||||
N8N_WEBHOOK_URL: str = ""
|
N8N_WEBHOOK_URL: str = ""
|
||||||
N8N_API_URL: str = ""
|
N8N_API_URL: str = ""
|
||||||
|
N8N_API_KEY: str = ""
|
||||||
|
N8N_TELEGRAM_CREDENTIAL_NAME: str = "AI Software Factory Telegram"
|
||||||
N8N_USER: str = ""
|
N8N_USER: str = ""
|
||||||
N8N_PASSWORD: str = ""
|
N8N_PASSWORD: str = ""
|
||||||
|
|
||||||
|
# Runtime integration settings
|
||||||
|
BACKEND_PUBLIC_URL: str = "http://localhost:8000"
|
||||||
|
PROJECTS_ROOT: str = ""
|
||||||
|
|
||||||
# Telegram settings
|
# Telegram settings
|
||||||
TELEGRAM_BOT_TOKEN: str = ""
|
TELEGRAM_BOT_TOKEN: str = ""
|
||||||
TELEGRAM_CHAT_ID: str = ""
|
TELEGRAM_CHAT_ID: str = ""
|
||||||
@@ -54,6 +66,32 @@ class Settings(BaseSettings):
|
|||||||
DB_POOL_RECYCLE: int = 3600
|
DB_POOL_RECYCLE: int = 3600
|
||||||
DB_POOL_TIMEOUT: int = 30
|
DB_POOL_TIMEOUT: int = 30
|
||||||
|
|
||||||
|
@property
|
||||||
|
def postgres_url(self) -> str:
|
||||||
|
"""Get PostgreSQL URL with trimmed whitespace."""
|
||||||
|
return (self.POSTGRES_URL or "").strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def postgres_env_configured(self) -> bool:
|
||||||
|
"""Whether PostgreSQL was explicitly configured via environment variables."""
|
||||||
|
if self.postgres_url:
|
||||||
|
return True
|
||||||
|
postgres_env_keys = (
|
||||||
|
"POSTGRES_HOST",
|
||||||
|
"POSTGRES_PORT",
|
||||||
|
"POSTGRES_USER",
|
||||||
|
"POSTGRES_PASSWORD",
|
||||||
|
"POSTGRES_DB",
|
||||||
|
)
|
||||||
|
return any(bool(os.environ.get(key, "").strip()) for key in postgres_env_keys)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def use_sqlite(self) -> bool:
|
||||||
|
"""Whether SQLite should be used as the active database backend."""
|
||||||
|
if not self.USE_SQLITE:
|
||||||
|
return False
|
||||||
|
return not self.postgres_env_configured
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def pool(self) -> dict:
|
def pool(self) -> dict:
|
||||||
"""Get database pool configuration."""
|
"""Get database pool configuration."""
|
||||||
@@ -67,8 +105,10 @@ class Settings(BaseSettings):
|
|||||||
@property
|
@property
|
||||||
def database_url(self) -> str:
|
def database_url(self) -> str:
|
||||||
"""Get database connection URL."""
|
"""Get database connection URL."""
|
||||||
if self.USE_SQLITE:
|
if self.use_sqlite:
|
||||||
return f"sqlite:///{self.SQLITE_DB_PATH}"
|
return f"sqlite:///{self.SQLITE_DB_PATH}"
|
||||||
|
if self.postgres_url:
|
||||||
|
return self.postgres_url
|
||||||
return (
|
return (
|
||||||
f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
|
f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
|
||||||
f"@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}"
|
f"@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}"
|
||||||
@@ -77,8 +117,10 @@ class Settings(BaseSettings):
|
|||||||
@property
|
@property
|
||||||
def test_database_url(self) -> str:
|
def test_database_url(self) -> str:
|
||||||
"""Get test database connection URL."""
|
"""Get test database connection URL."""
|
||||||
if self.USE_SQLITE:
|
if self.use_sqlite:
|
||||||
return f"sqlite:///{self.SQLITE_DB_PATH}"
|
return f"sqlite:///{self.SQLITE_DB_PATH}"
|
||||||
|
if self.postgres_url:
|
||||||
|
return self.postgres_url
|
||||||
return (
|
return (
|
||||||
f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
|
f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
|
||||||
f"@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_TEST_DB}"
|
f"@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_TEST_DB}"
|
||||||
@@ -99,11 +141,41 @@ class Settings(BaseSettings):
|
|||||||
"""Get Gitea token with trimmed whitespace."""
|
"""Get Gitea token with trimmed whitespace."""
|
||||||
return self.GITEA_TOKEN.strip()
|
return self.GITEA_TOKEN.strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def gitea_owner(self) -> str:
|
||||||
|
"""Get Gitea owner/organization with trimmed whitespace."""
|
||||||
|
return self.GITEA_OWNER.strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def gitea_repo(self) -> str:
|
||||||
|
"""Get the optional fixed Gitea repository name with trimmed whitespace."""
|
||||||
|
return self.GITEA_REPO.strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def use_project_repositories(self) -> bool:
|
||||||
|
"""Whether the service should create one repository per generated project."""
|
||||||
|
return not bool(self.gitea_repo)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def n8n_webhook_url(self) -> str:
|
def n8n_webhook_url(self) -> str:
|
||||||
"""Get n8n webhook URL with trimmed whitespace."""
|
"""Get n8n webhook URL with trimmed whitespace."""
|
||||||
return self.N8N_WEBHOOK_URL.strip()
|
return self.N8N_WEBHOOK_URL.strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def n8n_api_url(self) -> str:
|
||||||
|
"""Get n8n API URL with trimmed whitespace."""
|
||||||
|
return self.N8N_API_URL.strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def n8n_api_key(self) -> str:
|
||||||
|
"""Get n8n API key with trimmed whitespace."""
|
||||||
|
return self.N8N_API_KEY.strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def n8n_telegram_credential_name(self) -> str:
|
||||||
|
"""Get the preferred n8n Telegram credential name."""
|
||||||
|
return self.N8N_TELEGRAM_CREDENTIAL_NAME.strip() or "AI Software Factory Telegram"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def telegram_bot_token(self) -> str:
|
def telegram_bot_token(self) -> str:
|
||||||
"""Get Telegram bot token with trimmed whitespace."""
|
"""Get Telegram bot token with trimmed whitespace."""
|
||||||
@@ -114,6 +186,18 @@ class Settings(BaseSettings):
|
|||||||
"""Get Telegram chat ID with trimmed whitespace."""
|
"""Get Telegram chat ID with trimmed whitespace."""
|
||||||
return self.TELEGRAM_CHAT_ID.strip()
|
return self.TELEGRAM_CHAT_ID.strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def backend_public_url(self) -> str:
|
||||||
|
"""Get backend public URL with trimmed whitespace."""
|
||||||
|
return self.BACKEND_PUBLIC_URL.strip().rstrip("/")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def projects_root(self) -> Path:
|
||||||
|
"""Get the root directory for generated project artifacts."""
|
||||||
|
if self.PROJECTS_ROOT.strip():
|
||||||
|
return Path(self.PROJECTS_ROOT).expanduser().resolve()
|
||||||
|
return Path(__file__).resolve().parent.parent / "test-project"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def postgres_host(self) -> str:
|
def postgres_host(self) -> str:
|
||||||
"""Get PostgreSQL host."""
|
"""Get PostgreSQL host."""
|
||||||
@@ -144,11 +228,5 @@ class Settings(BaseSettings):
|
|||||||
"""Get test PostgreSQL database name."""
|
"""Get test PostgreSQL database name."""
|
||||||
return self.POSTGRES_TEST_DB.strip()
|
return self.POSTGRES_TEST_DB.strip()
|
||||||
|
|
||||||
class Config:
|
|
||||||
env_file = ".env"
|
|
||||||
env_file_encoding = "utf-8"
|
|
||||||
extra = "ignore"
|
|
||||||
|
|
||||||
|
|
||||||
# Create instance for module-level access
|
# Create instance for module-level access
|
||||||
settings = Settings()
|
settings = Settings()
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,16 +1,50 @@
|
|||||||
"""Database connection and session management."""
|
"""Database connection and session management."""
|
||||||
|
|
||||||
from sqlalchemy import create_engine, text
|
from collections.abc import Generator
|
||||||
from sqlalchemy.orm import sessionmaker, Session
|
from pathlib import Path
|
||||||
from config import settings
|
from urllib.parse import urlparse
|
||||||
from models import Base
|
|
||||||
|
from alembic import command
|
||||||
|
from alembic.config import Config
|
||||||
|
from sqlalchemy import create_engine, event, text
|
||||||
|
from sqlalchemy.engine import Engine
|
||||||
|
from sqlalchemy.orm import Session, sessionmaker
|
||||||
|
|
||||||
|
try:
|
||||||
|
from .config import settings
|
||||||
|
from .models import Base
|
||||||
|
except ImportError:
|
||||||
|
from config import settings
|
||||||
|
from models import Base
|
||||||
|
|
||||||
|
|
||||||
def get_engine() -> create_engine:
|
def get_database_runtime_summary() -> dict[str, str]:
|
||||||
|
"""Return a human-readable summary of the effective database backend."""
|
||||||
|
if settings.use_sqlite:
|
||||||
|
db_path = str(Path(settings.SQLITE_DB_PATH or "/tmp/ai_software_factory_test.db").expanduser().resolve())
|
||||||
|
return {
|
||||||
|
"backend": "sqlite",
|
||||||
|
"target": db_path,
|
||||||
|
"database": db_path,
|
||||||
|
}
|
||||||
|
|
||||||
|
parsed = urlparse(settings.database_url)
|
||||||
|
database_name = parsed.path.lstrip("/") or "unknown"
|
||||||
|
host = parsed.hostname or "unknown-host"
|
||||||
|
port = str(parsed.port or 5432)
|
||||||
|
return {
|
||||||
|
"backend": parsed.scheme.split("+", 1)[0] or "postgresql",
|
||||||
|
"target": f"{host}:{port}/{database_name}",
|
||||||
|
"database": database_name,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_engine() -> Engine:
|
||||||
"""Create and return SQLAlchemy engine with connection pooling."""
|
"""Create and return SQLAlchemy engine with connection pooling."""
|
||||||
# Use SQLite for tests, PostgreSQL for production
|
# Use SQLite for tests, PostgreSQL for production
|
||||||
if settings.USE_SQLITE:
|
if settings.use_sqlite:
|
||||||
db_path = settings.SQLITE_DB_PATH or "/tmp/ai_software_factory_test.db"
|
db_path = settings.SQLITE_DB_PATH or "/tmp/ai_software_factory_test.db"
|
||||||
|
Path(db_path).expanduser().resolve().parent.mkdir(parents=True, exist_ok=True)
|
||||||
db_url = f"sqlite:///{db_path}"
|
db_url = f"sqlite:///{db_path}"
|
||||||
# SQLite-specific configuration - no pooling for SQLite
|
# SQLite-specific configuration - no pooling for SQLite
|
||||||
engine = create_engine(
|
engine = create_engine(
|
||||||
@@ -19,7 +53,7 @@ def get_engine() -> create_engine:
|
|||||||
echo=settings.LOG_LEVEL == "DEBUG"
|
echo=settings.LOG_LEVEL == "DEBUG"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
db_url = settings.POSTGRES_URL or settings.database_url
|
db_url = settings.database_url
|
||||||
# PostgreSQL-specific configuration
|
# PostgreSQL-specific configuration
|
||||||
engine = create_engine(
|
engine = create_engine(
|
||||||
db_url,
|
db_url,
|
||||||
@@ -31,7 +65,7 @@ def get_engine() -> create_engine:
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Event listener for connection checkout (PostgreSQL only)
|
# Event listener for connection checkout (PostgreSQL only)
|
||||||
if not settings.USE_SQLITE:
|
if not settings.use_sqlite:
|
||||||
@event.listens_for(engine, "checkout")
|
@event.listens_for(engine, "checkout")
|
||||||
def receive_checkout(dbapi_connection, connection_record, connection_proxy):
|
def receive_checkout(dbapi_connection, connection_record, connection_proxy):
|
||||||
"""Log connection checkout for audit purposes."""
|
"""Log connection checkout for audit purposes."""
|
||||||
@@ -47,37 +81,27 @@ def get_engine() -> create_engine:
|
|||||||
return engine
|
return engine
|
||||||
|
|
||||||
|
|
||||||
def get_session() -> Session:
|
def get_session() -> Generator[Session, None, None]:
|
||||||
"""Create and return database session factory."""
|
"""Yield a managed database session."""
|
||||||
engine = get_engine()
|
|
||||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
|
||||||
|
|
||||||
def session_factory() -> Session:
|
|
||||||
session = SessionLocal()
|
|
||||||
try:
|
|
||||||
yield session
|
|
||||||
session.commit()
|
|
||||||
except Exception:
|
|
||||||
session.rollback()
|
|
||||||
raise
|
|
||||||
finally:
|
|
||||||
session.close()
|
|
||||||
|
|
||||||
return session_factory
|
|
||||||
|
|
||||||
|
|
||||||
def get_db() -> Session:
|
|
||||||
"""Dependency for FastAPI routes that need database access."""
|
|
||||||
engine = get_engine()
|
engine = get_engine()
|
||||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||||
|
|
||||||
session = SessionLocal()
|
session = SessionLocal()
|
||||||
try:
|
try:
|
||||||
yield session
|
yield session
|
||||||
|
session.commit()
|
||||||
|
except Exception:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
finally:
|
finally:
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
|
|
||||||
|
def get_db() -> Generator[Session, None, None]:
|
||||||
|
"""Dependency for FastAPI routes that need database access."""
|
||||||
|
yield from get_session()
|
||||||
|
|
||||||
|
|
||||||
def get_db_sync() -> Session:
|
def get_db_sync() -> Session:
|
||||||
"""Get a database session directly (for non-FastAPI/NiceGUI usage)."""
|
"""Get a database session directly (for non-FastAPI/NiceGUI usage)."""
|
||||||
engine = get_engine()
|
engine = get_engine()
|
||||||
@@ -92,21 +116,44 @@ def get_db_session() -> Session:
|
|||||||
return session
|
return session
|
||||||
|
|
||||||
|
|
||||||
|
def get_alembic_config(database_url: str | None = None) -> Config:
|
||||||
|
"""Return an Alembic config bound to the active database URL."""
|
||||||
|
package_root = Path(__file__).resolve().parent
|
||||||
|
alembic_ini = package_root / "alembic.ini"
|
||||||
|
config = Config(str(alembic_ini))
|
||||||
|
config.set_main_option("script_location", str(package_root / "alembic"))
|
||||||
|
config.set_main_option("sqlalchemy.url", database_url or settings.database_url)
|
||||||
|
return config
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations(database_url: str | None = None) -> dict:
|
||||||
|
"""Apply Alembic migrations to the configured database."""
|
||||||
|
try:
|
||||||
|
config = get_alembic_config(database_url)
|
||||||
|
command.upgrade(config, "head")
|
||||||
|
return {"status": "success", "message": "Database migrations applied."}
|
||||||
|
except Exception as exc:
|
||||||
|
return {"status": "error", "message": str(exc)}
|
||||||
|
|
||||||
|
|
||||||
def init_db() -> dict:
|
def init_db() -> dict:
|
||||||
"""Initialize database tables and database if needed."""
|
"""Initialize database tables and database if needed."""
|
||||||
if settings.USE_SQLITE:
|
if settings.use_sqlite:
|
||||||
# SQLite - auto-creates file and tables
|
result = run_migrations()
|
||||||
|
if result["status"] == "success":
|
||||||
|
print("SQLite database migrations applied successfully.")
|
||||||
|
return {"status": "success", "message": "SQLite database initialized via migrations."}
|
||||||
engine = get_engine()
|
engine = get_engine()
|
||||||
try:
|
try:
|
||||||
Base.metadata.create_all(bind=engine)
|
Base.metadata.create_all(bind=engine)
|
||||||
print("SQLite database tables created successfully.")
|
print("SQLite database tables created successfully.")
|
||||||
return {'status': 'success', 'message': 'SQLite database initialized.'}
|
return {"status": "success", "message": "SQLite database initialized with metadata fallback."}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error initializing SQLite database: {str(e)}")
|
print(f"Error initializing SQLite database: {str(e)}")
|
||||||
return {'status': 'error', 'message': f'Error: {str(e)}'}
|
return {'status': 'error', 'message': f'Error: {str(e)}'}
|
||||||
else:
|
else:
|
||||||
# PostgreSQL
|
# PostgreSQL
|
||||||
db_url = settings.POSTGRES_URL or settings.database_url
|
db_url = settings.database_url
|
||||||
db_name = db_url.split('/')[-1] if '/' in db_url else 'ai_software_factory'
|
db_name = db_url.split('/')[-1] if '/' in db_url else 'ai_software_factory'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -139,10 +186,14 @@ def init_db() -> dict:
|
|||||||
# Just create tables in postgres database for now
|
# Just create tables in postgres database for now
|
||||||
print(f"Using existing 'postgres' database.")
|
print(f"Using existing 'postgres' database.")
|
||||||
|
|
||||||
# Create tables
|
migration_result = run_migrations(db_url)
|
||||||
Base.metadata.create_all(bind=engine)
|
if migration_result["status"] == "success":
|
||||||
print(f"PostgreSQL database '{db_name}' tables created successfully.")
|
print(f"PostgreSQL database '{db_name}' migrations applied successfully.")
|
||||||
return {'status': 'success', 'message': f'PostgreSQL database "{db_name}" initialized.'}
|
return {'status': 'success', 'message': f'PostgreSQL database "{db_name}" initialized via migrations.'}
|
||||||
|
|
||||||
|
Base.metadata.create_all(bind=engine)
|
||||||
|
print(f"PostgreSQL database '{db_name}' tables created successfully.")
|
||||||
|
return {'status': 'success', 'message': f'PostgreSQL database "{db_name}" initialized with metadata fallback.'}
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error initializing PostgreSQL database: {str(e)}")
|
print(f"Error initializing PostgreSQL database: {str(e)}")
|
||||||
@@ -151,7 +202,7 @@ def init_db() -> dict:
|
|||||||
|
|
||||||
def drop_db() -> dict:
|
def drop_db() -> dict:
|
||||||
"""Drop all database tables (use with caution!)."""
|
"""Drop all database tables (use with caution!)."""
|
||||||
if settings.USE_SQLITE:
|
if settings.use_sqlite:
|
||||||
engine = get_engine()
|
engine = get_engine()
|
||||||
try:
|
try:
|
||||||
Base.metadata.drop_all(bind=engine)
|
Base.metadata.drop_all(bind=engine)
|
||||||
@@ -161,7 +212,7 @@ def drop_db() -> dict:
|
|||||||
print(f"Error dropping SQLite tables: {str(e)}")
|
print(f"Error dropping SQLite tables: {str(e)}")
|
||||||
return {'status': 'error', 'message': str(e)}
|
return {'status': 'error', 'message': str(e)}
|
||||||
else:
|
else:
|
||||||
db_url = settings.POSTGRES_URL or settings.database_url
|
db_url = settings.database_url
|
||||||
db_name = db_url.split('/')[-1] if '/' in db_url else 'ai_software_factory'
|
db_name = db_url.split('/')[-1] if '/' in db_url else 'ai_software_factory'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -176,27 +227,4 @@ def drop_db() -> dict:
|
|||||||
|
|
||||||
def create_migration_script() -> str:
|
def create_migration_script() -> str:
|
||||||
"""Generate a migration script for database schema changes."""
|
"""Generate a migration script for database schema changes."""
|
||||||
return '''-- Migration script for AI Software Factory database
|
return """See ai_software_factory/alembic/versions for managed schema migrations."""
|
||||||
-- Generated automatically - review before applying
|
|
||||||
|
|
||||||
-- Add new columns to existing tables if needed
|
|
||||||
-- This is a placeholder for future migrations
|
|
||||||
|
|
||||||
-- Example: Add audit_trail_index for better query performance
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_audit_trail_timestamp ON audit_trail(timestamp);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_audit_trail_action ON audit_trail(action);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_audit_trail_project ON audit_trail(project_id);
|
|
||||||
|
|
||||||
-- Example: Add user_actions_index for better query performance
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_actions_timestamp ON user_actions(timestamp);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_actions_actor ON user_actions(actor_type, actor_name);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_actions_history ON user_actions(history_id);
|
|
||||||
|
|
||||||
-- Example: Add project_logs_index for better query performance
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_project_logs_timestamp ON project_logs(timestamp);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_project_logs_level ON project_logs(log_level);
|
|
||||||
|
|
||||||
-- Example: Add system_logs_index for better query performance
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_system_logs_timestamp ON system_logs(timestamp);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_system_logs_component ON system_logs(component);
|
|
||||||
'''
|
|
||||||
@@ -5,9 +5,14 @@ The dashboard shown is from dashboard_ui.py with real-time database data.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from fastapi import FastAPI
|
from fastapi import FastAPI
|
||||||
|
from fastapi.responses import RedirectResponse
|
||||||
|
|
||||||
from nicegui import app, ui
|
from nicegui import app, ui
|
||||||
from dashboard_ui import create_dashboard
|
|
||||||
|
try:
|
||||||
|
from .dashboard_ui import create_dashboard, create_health_page
|
||||||
|
except ImportError:
|
||||||
|
from dashboard_ui import create_dashboard, create_health_page
|
||||||
|
|
||||||
|
|
||||||
def init(fastapi_app: FastAPI, storage_secret: str = 'Secr2t!') -> None:
|
def init(fastapi_app: FastAPI, storage_secret: str = 'Secr2t!') -> None:
|
||||||
@@ -18,14 +23,30 @@ def init(fastapi_app: FastAPI, storage_secret: str = 'Secr2t!') -> None:
|
|||||||
storage_secret: Optional secret for persistent user storage.
|
storage_secret: Optional secret for persistent user storage.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ui.page('/show')
|
def render_dashboard_page() -> None:
|
||||||
def show():
|
ui.page_title('AI Software Factory')
|
||||||
create_dashboard()
|
create_dashboard()
|
||||||
|
|
||||||
# NOTE dark mode will be persistent for each user across tabs and server restarts
|
# NOTE dark mode will be persistent for each user across tabs and server restarts
|
||||||
ui.dark_mode().bind_value(app.storage.user, 'dark_mode')
|
ui.dark_mode().bind_value(app.storage.user, 'dark_mode')
|
||||||
ui.checkbox('dark mode').bind_value(app.storage.user, 'dark_mode')
|
ui.checkbox('dark mode').bind_value(app.storage.user, 'dark_mode')
|
||||||
|
|
||||||
|
@ui.page('/')
|
||||||
|
def home() -> None:
|
||||||
|
render_dashboard_page()
|
||||||
|
|
||||||
|
@ui.page('/show')
|
||||||
|
def show() -> None:
|
||||||
|
render_dashboard_page()
|
||||||
|
|
||||||
|
@ui.page('/health-ui')
|
||||||
|
def health_ui() -> None:
|
||||||
|
create_health_page()
|
||||||
|
|
||||||
|
@fastapi_app.get('/dashboard', include_in_schema=False)
|
||||||
|
def dashboard_redirect() -> RedirectResponse:
|
||||||
|
return RedirectResponse(url='/', status_code=307)
|
||||||
|
|
||||||
ui.run_with(
|
ui.run_with(
|
||||||
fastapi_app,
|
fastapi_app,
|
||||||
storage_secret=storage_secret, # NOTE setting a secret is optional but allows for persistent storage per user
|
storage_secret=storage_secret, # NOTE setting a secret is optional but allows for persistent storage per user
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
echo "Hello world"
|
|
||||||
@@ -6,29 +6,774 @@ This application uses FastAPI to:
|
|||||||
2. Host NiceGUI frontend via ui.run_with()
|
2. Host NiceGUI frontend via ui.run_with()
|
||||||
|
|
||||||
The NiceGUI frontend provides:
|
The NiceGUI frontend provides:
|
||||||
1. Interactive dashboard at /show
|
1. Interactive dashboard at /
|
||||||
2. Real-time data visualization
|
2. Real-time data visualization
|
||||||
3. Audit trail display
|
3. Audit trail display
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import frontend
|
from __future__ import annotations
|
||||||
from fastapi import FastAPI
|
|
||||||
from database import init_db
|
|
||||||
|
|
||||||
app = FastAPI()
|
from contextlib import asynccontextmanager
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Annotated
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from fastapi import Depends, FastAPI, HTTPException, Query
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
try:
|
||||||
|
from . import __version__, frontend
|
||||||
|
from . import database as database_module
|
||||||
|
from .agents.change_summary import ChangeSummaryGenerator
|
||||||
|
from .agents.database_manager import DatabaseManager
|
||||||
|
from .agents.request_interpreter import RequestInterpreter
|
||||||
|
from .agents.orchestrator import AgentOrchestrator
|
||||||
|
from .agents.n8n_setup import N8NSetupAgent
|
||||||
|
from .agents.prompt_workflow import PromptWorkflowManager
|
||||||
|
from .agents.ui_manager import UIManager
|
||||||
|
from .models import ProjectHistory, ProjectLog, SystemLog
|
||||||
|
except ImportError:
|
||||||
|
import frontend
|
||||||
|
import database as database_module
|
||||||
|
from agents.change_summary import ChangeSummaryGenerator
|
||||||
|
from agents.database_manager import DatabaseManager
|
||||||
|
from agents.request_interpreter import RequestInterpreter
|
||||||
|
from agents.orchestrator import AgentOrchestrator
|
||||||
|
from agents.n8n_setup import N8NSetupAgent
|
||||||
|
from agents.prompt_workflow import PromptWorkflowManager
|
||||||
|
from agents.ui_manager import UIManager
|
||||||
|
from models import ProjectHistory, ProjectLog, SystemLog
|
||||||
|
|
||||||
|
__version__ = "0.0.1"
|
||||||
|
|
||||||
|
|
||||||
@app.get('/')
|
@asynccontextmanager
|
||||||
def read_root():
|
async def lifespan(_app: FastAPI):
|
||||||
"""Root endpoint that returns welcome message."""
|
"""Log resolved runtime configuration when the app starts."""
|
||||||
return {'Hello': 'World'}
|
runtime = database_module.get_database_runtime_summary()
|
||||||
|
print(
|
||||||
|
f"Runtime configuration: database_backend={runtime['backend']} target={runtime['target']}"
|
||||||
|
)
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
app = FastAPI(lifespan=lifespan)
|
||||||
|
|
||||||
|
DbSession = Annotated[Session, Depends(database_module.get_db)]
|
||||||
|
PROJECT_ID_PATTERN = re.compile(r"[^a-z0-9]+")
|
||||||
|
|
||||||
|
|
||||||
|
class SoftwareRequest(BaseModel):
|
||||||
|
"""Request body for software generation."""
|
||||||
|
|
||||||
|
name: str = Field(min_length=1, max_length=255)
|
||||||
|
description: str = Field(min_length=1, max_length=255)
|
||||||
|
features: list[str] = Field(default_factory=list)
|
||||||
|
tech_stack: list[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class N8NSetupRequest(BaseModel):
|
||||||
|
"""Request body for n8n workflow provisioning."""
|
||||||
|
|
||||||
|
api_url: str | None = None
|
||||||
|
api_key: str | None = None
|
||||||
|
webhook_path: str = "telegram"
|
||||||
|
backend_url: str | None = None
|
||||||
|
force_update: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class FreeformSoftwareRequest(BaseModel):
|
||||||
|
"""Request body for free-form software generation."""
|
||||||
|
|
||||||
|
prompt_text: str = Field(min_length=1)
|
||||||
|
source: str = 'telegram'
|
||||||
|
chat_id: str | None = None
|
||||||
|
chat_type: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class GiteaRepositoryOnboardRequest(BaseModel):
|
||||||
|
"""Request body for onboarding a manually created Gitea repository."""
|
||||||
|
|
||||||
|
repo_name: str = Field(min_length=1, max_length=255)
|
||||||
|
owner: str | None = None
|
||||||
|
sync_commits: bool = True
|
||||||
|
commit_limit: int = Field(default=25, ge=1, le=200)
|
||||||
|
|
||||||
|
|
||||||
|
def _build_project_id(name: str) -> str:
|
||||||
|
"""Create a stable project id from the requested name."""
|
||||||
|
slug = PROJECT_ID_PATTERN.sub("-", name.strip().lower()).strip("-") or "project"
|
||||||
|
return f"{slug}-{uuid4().hex[:8]}"
|
||||||
|
|
||||||
|
|
||||||
|
def _serialize_project(history: ProjectHistory) -> dict:
|
||||||
|
"""Serialize a project history row for API responses."""
|
||||||
|
return {
|
||||||
|
"history_id": history.id,
|
||||||
|
"project_id": history.project_id,
|
||||||
|
"name": history.project_name,
|
||||||
|
"description": history.description,
|
||||||
|
"status": history.status,
|
||||||
|
"progress": history.progress,
|
||||||
|
"message": history.message,
|
||||||
|
"current_step": history.current_step,
|
||||||
|
"error_message": history.error_message,
|
||||||
|
"created_at": history.created_at.isoformat() if history.created_at else None,
|
||||||
|
"updated_at": history.updated_at.isoformat() if history.updated_at else None,
|
||||||
|
"completed_at": history.completed_at.isoformat() if history.completed_at else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _serialize_project_log(log: ProjectLog) -> dict:
|
||||||
|
"""Serialize a project log row."""
|
||||||
|
return {
|
||||||
|
"id": log.id,
|
||||||
|
"history_id": log.history_id,
|
||||||
|
"level": log.log_level,
|
||||||
|
"message": log.log_message,
|
||||||
|
"timestamp": log.timestamp.isoformat() if log.timestamp else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _serialize_system_log(log: SystemLog) -> dict:
|
||||||
|
"""Serialize a system log row."""
|
||||||
|
return {
|
||||||
|
"id": log.id,
|
||||||
|
"component": log.component,
|
||||||
|
"level": log.log_level,
|
||||||
|
"message": log.log_message,
|
||||||
|
"user_agent": log.user_agent,
|
||||||
|
"ip_address": log.ip_address,
|
||||||
|
"timestamp": log.created_at.isoformat() if log.created_at else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _serialize_audit_item(item: dict) -> dict:
|
||||||
|
"""Return audit-shaped dictionaries unchanged for API output."""
|
||||||
|
return item
|
||||||
|
|
||||||
|
|
||||||
|
def _compose_prompt_text(request: SoftwareRequest) -> str:
|
||||||
|
"""Render the originating software request into a stable prompt string."""
|
||||||
|
features = ", ".join(request.features) if request.features else "None"
|
||||||
|
tech_stack = ", ".join(request.tech_stack) if request.tech_stack else "None"
|
||||||
|
return (
|
||||||
|
f"Name: {request.name}\n"
|
||||||
|
f"Description: {request.description}\n"
|
||||||
|
f"Features: {features}\n"
|
||||||
|
f"Tech Stack: {tech_stack}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def _run_generation(
|
||||||
|
request: SoftwareRequest,
|
||||||
|
db: Session,
|
||||||
|
prompt_text: str | None = None,
|
||||||
|
prompt_actor: str = 'api',
|
||||||
|
prompt_source_context: dict | None = None,
|
||||||
|
prompt_routing: dict | None = None,
|
||||||
|
preferred_project_id: str | None = None,
|
||||||
|
related_issue: dict | None = None,
|
||||||
|
) -> dict:
|
||||||
|
"""Run the shared generation pipeline for a structured request."""
|
||||||
|
database_module.init_db()
|
||||||
|
|
||||||
|
manager = DatabaseManager(db)
|
||||||
|
reusable_history = manager.get_project_by_id(preferred_project_id, include_archived=False) if preferred_project_id else manager.get_latest_project_by_name(request.name)
|
||||||
|
if reusable_history and database_module.settings.gitea_url and database_module.settings.gitea_token:
|
||||||
|
try:
|
||||||
|
from .agents.gitea import GiteaAPI
|
||||||
|
except ImportError:
|
||||||
|
from agents.gitea import GiteaAPI
|
||||||
|
manager.sync_pull_request_states(
|
||||||
|
GiteaAPI(
|
||||||
|
token=database_module.settings.GITEA_TOKEN,
|
||||||
|
base_url=database_module.settings.GITEA_URL,
|
||||||
|
owner=database_module.settings.GITEA_OWNER,
|
||||||
|
repo=database_module.settings.GITEA_REPO or '',
|
||||||
|
),
|
||||||
|
project_id=reusable_history.project_id,
|
||||||
|
)
|
||||||
|
if preferred_project_id and reusable_history is not None:
|
||||||
|
project_id = reusable_history.project_id
|
||||||
|
elif reusable_history and manager.get_open_pull_request(project_id=reusable_history.project_id):
|
||||||
|
project_id = reusable_history.project_id
|
||||||
|
else:
|
||||||
|
project_id = _build_project_id(request.name)
|
||||||
|
reusable_history = None
|
||||||
|
resolved_prompt_text = prompt_text or _compose_prompt_text(request)
|
||||||
|
orchestrator = AgentOrchestrator(
|
||||||
|
project_id=project_id,
|
||||||
|
project_name=request.name,
|
||||||
|
description=request.description,
|
||||||
|
features=request.features,
|
||||||
|
tech_stack=request.tech_stack,
|
||||||
|
db=db,
|
||||||
|
prompt_text=resolved_prompt_text,
|
||||||
|
prompt_actor=prompt_actor,
|
||||||
|
existing_history=reusable_history,
|
||||||
|
prompt_source_context=prompt_source_context,
|
||||||
|
prompt_routing=prompt_routing,
|
||||||
|
related_issue_hint=related_issue,
|
||||||
|
)
|
||||||
|
result = await orchestrator.run()
|
||||||
|
|
||||||
|
manager = DatabaseManager(db)
|
||||||
|
manager.log_system_event(
|
||||||
|
component='api',
|
||||||
|
level='INFO' if result['status'] == 'completed' else 'ERROR',
|
||||||
|
message=f"Generated project {project_id} with {len(result.get('changed_files', []))} artifact(s)",
|
||||||
|
)
|
||||||
|
|
||||||
|
history = manager.get_project_by_id(project_id)
|
||||||
|
project_logs = manager.get_project_logs(history.id)
|
||||||
|
response_data = _serialize_project(history)
|
||||||
|
response_data['logs'] = [_serialize_project_log(log) for log in project_logs]
|
||||||
|
response_data['ui_data'] = result.get('ui_data')
|
||||||
|
response_data['features'] = request.features
|
||||||
|
response_data['tech_stack'] = request.tech_stack
|
||||||
|
response_data['project_root'] = result.get('project_root', str(_project_root(project_id)))
|
||||||
|
response_data['changed_files'] = result.get('changed_files', [])
|
||||||
|
response_data['repository'] = result.get('repository')
|
||||||
|
response_data['related_issue'] = result.get('related_issue') or (result.get('ui_data') or {}).get('related_issue')
|
||||||
|
response_data['pull_request'] = result.get('pull_request') or manager.get_open_pull_request(project_id=project_id)
|
||||||
|
summary_context = {
|
||||||
|
'name': response_data['name'],
|
||||||
|
'description': response_data['description'],
|
||||||
|
'features': response_data['features'],
|
||||||
|
'tech_stack': response_data['tech_stack'],
|
||||||
|
'changed_files': response_data['changed_files'],
|
||||||
|
'repository_url': (
|
||||||
|
(response_data.get('repository') or {}).get('url')
|
||||||
|
if isinstance(response_data.get('repository'), dict)
|
||||||
|
and (response_data.get('repository') or {}).get('status') in {'created', 'exists', 'ready', 'shared'}
|
||||||
|
else None
|
||||||
|
),
|
||||||
|
'repository_status': (response_data.get('repository') or {}).get('status') if isinstance(response_data.get('repository'), dict) else None,
|
||||||
|
'pull_request_url': (response_data.get('pull_request') or {}).get('pr_url') if isinstance(response_data.get('pull_request'), dict) else None,
|
||||||
|
'pull_request_state': (response_data.get('pull_request') or {}).get('pr_state') if isinstance(response_data.get('pull_request'), dict) else None,
|
||||||
|
'related_issue': response_data.get('related_issue'),
|
||||||
|
'message': response_data.get('message'),
|
||||||
|
'logs': [log.get('message', '') for log in response_data.get('logs', []) if isinstance(log, dict)],
|
||||||
|
}
|
||||||
|
summary_message, summary_trace = await ChangeSummaryGenerator().summarize_with_trace(summary_context)
|
||||||
|
if orchestrator.db_manager and orchestrator.history and orchestrator.prompt_audit:
|
||||||
|
orchestrator.db_manager.log_llm_trace(
|
||||||
|
project_id=project_id,
|
||||||
|
history_id=orchestrator.history.id,
|
||||||
|
prompt_id=orchestrator.prompt_audit.id,
|
||||||
|
stage=summary_trace['stage'],
|
||||||
|
provider=summary_trace['provider'],
|
||||||
|
model=summary_trace['model'],
|
||||||
|
system_prompt=summary_trace['system_prompt'],
|
||||||
|
user_prompt=summary_trace['user_prompt'],
|
||||||
|
assistant_response=summary_trace['assistant_response'],
|
||||||
|
raw_response=summary_trace.get('raw_response'),
|
||||||
|
fallback_used=summary_trace.get('fallback_used', False),
|
||||||
|
)
|
||||||
|
response_data['summary_message'] = summary_message
|
||||||
|
response_data['pull_request'] = result.get('pull_request') or manager.get_open_pull_request(project_id=project_id)
|
||||||
|
return {'status': result['status'], 'data': response_data, 'summary_message': summary_message}
|
||||||
|
|
||||||
|
|
||||||
|
def _project_root(project_id: str) -> Path:
    """Return the on-disk directory holding the files of a generated project."""
    projects_root = database_module.settings.projects_root
    return projects_root / project_id
|
||||||
|
|
||||||
|
|
||||||
|
def _create_gitea_api():
    """Create a configured Gitea client or raise an HTTP error if unavailable.

    Raises:
        HTTPException: 400 when the Gitea URL or token setting is missing.
    """
    # Guard first so callers get a clean 400 instead of a client built from
    # empty credentials.
    if not database_module.settings.gitea_url or not database_module.settings.gitea_token:
        raise HTTPException(status_code=400, detail='Gitea integration is not configured')
    # Support both package-relative and flat-script execution layouts.
    try:
        from .agents.gitea import GiteaAPI
    except ImportError:
        from agents.gitea import GiteaAPI
    # Bug fix: the guard above and the rest of this module read the lowercase
    # settings attributes (gitea_url/gitea_token/gitea_owner); the uppercase
    # names previously used here would raise AttributeError at runtime.
    return GiteaAPI(
        token=database_module.settings.gitea_token,
        base_url=database_module.settings.gitea_url,
        owner=database_module.settings.gitea_owner,
        repo=database_module.settings.gitea_repo or '',
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_n8n_api_url(explicit_url: str | None = None) -> str:
|
||||||
|
"""Resolve the effective n8n API URL from explicit input or settings."""
|
||||||
|
if explicit_url and explicit_url.strip():
|
||||||
|
return explicit_url.strip()
|
||||||
|
if database_module.settings.n8n_api_url:
|
||||||
|
return database_module.settings.n8n_api_url
|
||||||
|
webhook_url = database_module.settings.n8n_webhook_url
|
||||||
|
if webhook_url:
|
||||||
|
return webhook_url.split("/webhook", 1)[0].rstrip("/")
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/api')
def read_api_info():
    """Describe the service and enumerate its HTTP endpoints for API clients."""
    endpoint_paths = [
        '/',
        '/api',
        '/health',
        '/generate',
        '/generate/text',
        '/projects',
        '/status/{project_id}',
        '/audit/projects',
        '/audit/logs',
        '/audit/system/logs',
        '/audit/prompts',
        '/audit/changes',
        '/audit/issues',
        '/audit/commit-context',
        '/audit/timeline',
        '/audit/llm-traces',
        '/audit/pull-requests',
        '/audit/lineage',
        '/audit/correlations',
        '/projects/{project_id}/archive',
        '/projects/{project_id}/unarchive',
        '/projects/{project_id}',
        '/projects/{project_id}/prompts/{prompt_id}/undo',
        '/projects/{project_id}/sync-repository',
        '/gitea/repos',
        '/gitea/repos/onboard',
        '/n8n/health',
        '/n8n/setup',
    ]
    return {
        'service': 'AI Software Factory',
        'version': __version__,
        'endpoints': endpoint_paths,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/health')
def health_check():
    """Report service liveness plus the active database backend details."""
    runtime = database_module.get_database_runtime_summary()
    payload = {'status': 'healthy'}
    payload['database'] = runtime['backend']
    payload['database_target'] = runtime['target']
    payload['database_name'] = runtime['database']
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
@app.post('/generate')
async def generate_software(request: SoftwareRequest, db: DbSession):
    """Create and record a software-generation request."""
    # Thin wrapper: all the work happens in the shared generation pipeline.
    result = await _run_generation(request, db)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
@app.post('/generate/text')
async def generate_software_from_text(request: FreeformSoftwareRequest, db: DbSession):
    """Interpret a free-form request and run generation.

    Flow: filter unauthorized Telegram chats, interpret the prompt into a
    structured request, run the generation pipeline, persist the LLM trace,
    and annotate the response with routing/source metadata.
    """
    # Drop Telegram traffic coming from any chat other than the configured one.
    if (
        request.source == 'telegram'
        and database_module.settings.telegram_chat_id
        and request.chat_id
        and str(request.chat_id) != str(database_module.settings.telegram_chat_id)
    ):
        return {
            'status': 'ignored',
            'message': f"Ignoring Telegram message from chat {request.chat_id}",
            'source': {
                'type': request.source,
                'chat_id': request.chat_id,
                'chat_type': request.chat_type,
            },
        }

    manager = DatabaseManager(db)
    # Prior conversation context helps the interpreter route follow-up prompts.
    interpreter_context = manager.get_interpreter_context(chat_id=request.chat_id, source=request.source)
    interpreted, interpretation_trace = await RequestInterpreter().interpret_with_trace(
        request.prompt_text,
        context=interpreter_context,
    )
    routing = interpretation_trace.get('routing') or {}
    selected_history = manager.get_project_by_id(routing.get('project_id'), include_archived=False) if routing.get('project_id') else None
    # When routing targets an existing project, pin name/description to the
    # stored project so the interpreter cannot rename it.
    if selected_history is not None and routing.get('intent') != 'new_project':
        interpreted['name'] = selected_history.project_name
        interpreted['description'] = selected_history.description or interpreted['description']
    structured_request = SoftwareRequest(**interpreted)
    response = await _run_generation(
        structured_request,
        db,
        prompt_text=request.prompt_text,
        prompt_actor=request.source,
        prompt_source_context={
            'chat_id': request.chat_id,
            'chat_type': request.chat_type,
        },
        prompt_routing=routing,
        preferred_project_id=routing.get('project_id') if routing.get('intent') != 'new_project' else None,
        related_issue={'number': routing.get('issue_number')} if routing.get('issue_number') is not None else None,
    )
    project_data = response.get('data', {})
    # Persist the interpretation trace only when generation produced a history row.
    if project_data.get('history_id') is not None:
        # Fresh manager: _run_generation may have touched the session state.
        manager = DatabaseManager(db)
        prompts = manager.get_prompt_events(project_id=project_data.get('project_id'))
        # Newest prompt event first, presumably — TODO confirm ordering in get_prompt_events.
        prompt_id = prompts[0]['id'] if prompts else None
        manager.log_llm_trace(
            project_id=project_data.get('project_id'),
            history_id=project_data.get('history_id'),
            prompt_id=prompt_id,
            stage=interpretation_trace['stage'],
            provider=interpretation_trace['provider'],
            model=interpretation_trace['model'],
            system_prompt=interpretation_trace['system_prompt'],
            user_prompt=interpretation_trace['user_prompt'],
            assistant_response=interpretation_trace['assistant_response'],
            raw_response=interpretation_trace.get('raw_response'),
            fallback_used=interpretation_trace.get('fallback_used', False),
        )
    # Expose interpretation/routing details to the caller for transparency.
    response['interpreted_request'] = interpreted
    response['routing'] = routing
    response['llm_trace'] = interpretation_trace
    response['source'] = {
        'type': request.source,
        'chat_id': request.chat_id,
        'chat_type': request.chat_type,
    }
    return response
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/projects')
def list_projects(
    db: DbSession,
    include_archived: bool = Query(default=False),
    archived_only: bool = Query(default=False),
):
    """List recorded projects, optionally including or restricting to archived ones."""
    records = DatabaseManager(db).get_all_projects(
        include_archived=include_archived,
        archived_only=archived_only,
    )
    serialized = [_serialize_project(record) for record in records]
    return {'projects': serialized}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/status/{project_id}')
def get_project_status(project_id: str, db: DbSession):
    """Get the current status for a single project."""
    record = DatabaseManager(db).get_project_by_id(project_id)
    if record is None:
        raise HTTPException(status_code=404, detail='Project not found')
    return _serialize_project(record)
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/projects')
def get_audit_projects(db: DbSession):
    """Return projects together with their related logs and audit data."""
    manager = DatabaseManager(db)
    enriched = []
    for history in manager.get_all_projects():
        entry = _serialize_project(history)
        audit = manager.get_project_audit_data(history.project_id)
        # Attach the per-project audit collections alongside the base fields.
        for key in ('logs', 'actions', 'audit_trail'):
            entry[key] = audit[key]
        enriched.append(entry)
    return {'projects': enriched}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/prompts')
def get_prompt_audit(db: DbSession, project_id: str | None = Query(default=None)):
    """Return stored prompt submissions, optionally filtered by project."""
    events = DatabaseManager(db).get_prompt_events(project_id=project_id)
    return {'prompts': [_serialize_audit_item(event) for event in events]}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/changes')
def get_code_change_audit(db: DbSession, project_id: str | None = Query(default=None)):
    """Return recorded code changes, optionally filtered by project."""
    records = DatabaseManager(db).get_code_changes(project_id=project_id)
    return {'changes': [_serialize_audit_item(record) for record in records]}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/issues')
def get_issue_audit(
    db: DbSession,
    project_id: str | None = Query(default=None),
    state: str | None = Query(default=None),
):
    """Return tracked repository issues and issue-work events."""
    manager = DatabaseManager(db)
    issues = manager.get_repository_issues(project_id=project_id, state=state)
    work_events = manager.get_issue_work_events(project_id=project_id)
    return {'issues': issues, 'issue_work': work_events}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/commit-context')
def get_commit_context_audit(
    db: DbSession,
    commit_hash: str = Query(min_length=4),
    project_id: str | None = Query(default=None),
    branch_scope: str | None = Query(default=None, pattern='^(main|pr|manual)?$'),
):
    """Return the recorded context explaining how a commit came to be."""
    found = DatabaseManager(db).get_commit_context(
        commit_hash=commit_hash,
        project_id=project_id,
        branch_scope=branch_scope,
    )
    if found is None:
        raise HTTPException(status_code=404, detail='Commit context not found')
    return found
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/timeline')
def get_project_timeline_audit(
    db: DbSession,
    project_id: str = Query(min_length=1),
    branch_scope: str | None = Query(default=None, pattern='^(main|pr|manual)?$'),
):
    """Return the mixed audit timeline for one project."""
    timeline = DatabaseManager(db).get_project_timeline(
        project_id=project_id,
        branch_scope=branch_scope,
    )
    return {'timeline': timeline}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/llm-traces')
def get_llm_trace_audit(
    db: DbSession,
    project_id: str | None = Query(default=None),
    prompt_id: int | None = Query(default=None),
    stage: str | None = Query(default=None),
    model: str | None = Query(default=None),
    search: str | None = Query(default=None),
):
    """Return persisted LLM traces filtered by the optional query parameters."""
    # Note the query parameter is `search` but the manager expects `search_query`.
    traces = DatabaseManager(db).get_llm_traces(
        project_id=project_id,
        prompt_id=prompt_id,
        stage=stage,
        model=model,
        search_query=search,
    )
    return {'llm_traces': traces}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/lineage')
def get_prompt_change_lineage(db: DbSession, project_id: str | None = Query(default=None)):
    """Return explicit prompt-to-code lineage rows."""
    lineage_rows = DatabaseManager(db).get_prompt_change_links(project_id=project_id)
    return {'lineage': lineage_rows}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/correlations')
def get_prompt_change_correlations(db: DbSession, project_id: str | None = Query(default=None)):
    """Return prompt-to-change correlations for generated projects."""
    correlations = DatabaseManager(db).get_prompt_change_correlations(project_id=project_id)
    return {'correlations': correlations}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/pull-requests')
def get_pull_request_audit(db: DbSession, project_id: str | None = Query(default=None), open_only: bool = Query(default=False)):
    """Return tracked pull requests for generated projects."""
    pull_requests = DatabaseManager(db).get_pull_requests(project_id=project_id, only_open=open_only)
    return {'pull_requests': pull_requests}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post('/projects/{project_id}/prompts/{prompt_id}/undo')
async def undo_prompt_changes(project_id: str, prompt_id: int, db: DbSession):
    """Undo all changes associated with a specific prompt.

    Raises 404 for unknown projects and 400 for archived projects or
    workflow-level undo failures.
    """
    record = DatabaseManager(db).get_project_by_id(project_id)
    if record is None:
        raise HTTPException(status_code=404, detail='Project not found')
    if record.status == 'archived':
        raise HTTPException(status_code=400, detail='Archived projects cannot be modified')
    outcome = await PromptWorkflowManager(db).undo_prompt(project_id=project_id, prompt_id=prompt_id)
    if outcome.get('status') == 'error':
        raise HTTPException(status_code=400, detail=outcome.get('message', 'Undo failed'))
    return outcome
|
||||||
|
|
||||||
|
|
||||||
|
@app.post('/projects/{project_id}/archive')
def archive_project(project_id: str, db: DbSession):
    """Archive a project so it no longer participates in active automation."""
    outcome = DatabaseManager(db).archive_project(project_id)
    if outcome.get('status') == 'error':
        raise HTTPException(status_code=404, detail=outcome.get('message', 'Archive failed'))
    return outcome
|
||||||
|
|
||||||
|
|
||||||
|
@app.post('/projects/{project_id}/unarchive')
def unarchive_project(project_id: str, db: DbSession):
    """Restore an archived project back into the active automation set."""
    outcome = DatabaseManager(db).unarchive_project(project_id)
    if outcome.get('status') == 'error':
        raise HTTPException(status_code=404, detail=outcome.get('message', 'Restore failed'))
    return outcome
|
||||||
|
|
||||||
|
|
||||||
|
@app.delete('/projects/{project_id}')
def delete_project(project_id: str, db: DbSession):
    """Delete a project, its local project directory, and project-scoped DB traces.

    Also deletes the remote Gitea repository when the project owns a dedicated
    (non-shared) repo and Gitea is configured; a missing remote repo (404) is
    tolerated so local cleanup still proceeds.
    """
    manager = DatabaseManager(db)
    audit_data = manager.get_project_audit_data(project_id)
    if audit_data.get('project') is None:
        raise HTTPException(status_code=404, detail='Project not found')

    # Repository metadata may live at the top level or nested under the project.
    repository = audit_data.get('repository') or audit_data['project'].get('repository') or {}
    remote_delete = None
    # Only delete the remote repo for dedicated repos with full identity and
    # with Gitea credentials configured; shared repos are never deleted.
    if repository and repository.get('mode') != 'shared' and repository.get('owner') and repository.get('name') and database_module.settings.gitea_url and database_module.settings.gitea_token:
        remote_delete = _create_gitea_api().delete_repo_sync(owner=repository.get('owner'), repo=repository.get('name'))
        # 404 (already gone) and None status codes are treated as non-fatal.
        if remote_delete.get('error') and remote_delete.get('status_code') not in {404, None}:
            raise HTTPException(status_code=502, detail=remote_delete.get('error'))

    result = manager.delete_project(project_id)
    if result.get('status') == 'error':
        raise HTTPException(status_code=400, detail=result.get('message', 'Project deletion failed'))
    # Surface whether the remote repo was actually removed for the caller.
    result['remote_repository_deleted'] = bool(remote_delete and not remote_delete.get('error'))
    result['remote_repository'] = repository if repository else None
    return result
|
||||||
|
|
||||||
|
|
||||||
|
@app.post('/projects/{project_id}/sync-repository')
def sync_project_repository(project_id: str, db: DbSession, commit_limit: int = Query(default=25, ge=1, le=200)):
    """Import recent repository activity from Gitea for a tracked project."""
    manager = DatabaseManager(db)
    record = manager.get_project_by_id(project_id)
    if record is None:
        raise HTTPException(status_code=404, detail='Project not found')
    if record.status == 'archived':
        raise HTTPException(status_code=400, detail='Archived projects cannot be synced')
    gitea_api = _create_gitea_api()
    outcome = manager.sync_repository_activity(
        project_id=project_id,
        gitea_api=gitea_api,
        commit_limit=commit_limit,
    )
    if outcome.get('status') == 'error':
        raise HTTPException(status_code=400, detail=outcome.get('message', 'Repository sync failed'))
    # Refresh open issues as a side effect of a successful activity sync.
    manager.sync_repository_issues(project_id=project_id, gitea_api=gitea_api, state='open')
    return outcome
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/gitea/repos')
def list_gitea_repositories(db: DbSession, owner: str | None = Query(default=None)):
    """List repositories in the configured Gitea organization and whether they are already onboarded."""
    gitea_api = _create_gitea_api()
    resolved_owner = owner or database_module.settings.gitea_owner
    repos = gitea_api.list_repositories_sync(owner=resolved_owner)
    # An error response comes back as a dict instead of a list.
    if isinstance(repos, dict) and repos.get('error'):
        raise HTTPException(status_code=502, detail=repos.get('error'))
    manager = DatabaseManager(db)
    listing = []
    for repo in (repos if isinstance(repos, list) else []):
        tracked = manager.get_project_by_repository(resolved_owner, repo.get('name', ''))
        listing.append(
            {
                'name': repo.get('name'),
                'full_name': repo.get('full_name') or f"{resolved_owner}/{repo.get('name')}",
                'description': repo.get('description'),
                'html_url': repo.get('html_url'),
                'clone_url': repo.get('clone_url'),
                'default_branch': repo.get('default_branch'),
                'private': bool(repo.get('private', False)),
                'onboarded': tracked is not None,
                'project_id': tracked.project_id if tracked is not None else None,
            }
        )
    return {'repositories': listing}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post('/gitea/repos/onboard')
async def onboard_gitea_repository(request: GiteaRepositoryOnboardRequest, db: DbSession):
    """Onboard a manually created Gitea repository into the factory dashboard."""
    gitea_api = _create_gitea_api()
    owner = request.owner or database_module.settings.gitea_owner
    repo = await gitea_api.get_repo_info(owner=owner, repo=request.repo_name)
    if isinstance(repo, dict) and repo.get('error'):
        raise HTTPException(status_code=404, detail=repo.get('error'))
    manager = DatabaseManager(db)
    onboarded = manager.onboard_repository(owner=owner, repo_name=request.repo_name, repository_data=repo)
    # Always pull the open issues for the freshly onboarded project.
    manager.sync_repository_issues(project_id=onboarded['project_id'], gitea_api=gitea_api, state='open')
    sync_result = None
    if request.sync_commits:
        sync_result = manager.sync_repository_activity(
            project_id=onboarded['project_id'],
            gitea_api=gitea_api,
            commit_limit=request.commit_limit,
        )
    return {'status': 'success', 'onboarded': onboarded, 'sync_result': sync_result}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/logs')
def get_audit_logs(db: DbSession):
    """Return all project logs ordered newest first."""
    rows = db.query(ProjectLog).order_by(ProjectLog.id.desc()).all()
    serialized = [_serialize_project_log(row) for row in rows]
    return {'logs': serialized}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/audit/system/logs')
def get_system_audit_logs(
    db: DbSession,
    component: str | None = Query(default=None),
):
    """Return system logs newest first, optionally filtered by component."""
    query = db.query(SystemLog).order_by(SystemLog.id.desc())
    if component:
        query = query.filter(SystemLog.component == component)
    serialized = [_serialize_system_log(row) for row in query.all()]
    return {'logs': serialized}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/n8n/health')
async def get_n8n_health():
    """Check whether the configured n8n instance is reachable."""
    api_url = _resolve_n8n_api_url()
    if api_url:
        agent = N8NSetupAgent(api_url=api_url, webhook_token=database_module.settings.n8n_api_key)
        return await agent.health_check()
    # No base URL available: report a configuration error instead of probing.
    return {
        'status': 'error',
        'message': 'N8N_API_URL or N8N_WEBHOOK_URL is not configured.',
        'api_url': '',
        'auth_configured': bool(database_module.settings.n8n_api_key),
        'checks': [],
        'suggestion': 'Set N8N_API_URL to the base n8n address before provisioning workflows.',
    }
|
||||||
|
|
||||||
|
|
||||||
|
@app.post('/n8n/setup')
async def setup_n8n_workflow(request: N8NSetupRequest, db: DbSession):
    """Create or update the n8n Telegram workflow."""
    api_url = _resolve_n8n_api_url(request.api_url)
    if not api_url:
        raise HTTPException(status_code=400, detail='n8n API URL is not configured')

    # Request-supplied credentials win over configured defaults.
    token = request.api_key or database_module.settings.n8n_api_key
    agent = N8NSetupAgent(api_url=api_url, webhook_token=token)
    default_backend = f"{database_module.settings.backend_public_url}/generate/text"
    result = await agent.setup(
        webhook_path=request.webhook_path,
        backend_url=request.backend_url or default_backend,
        force_update=request.force_update,
        telegram_bot_token=database_module.settings.telegram_bot_token,
        telegram_credential_name=database_module.settings.n8n_telegram_credential_name,
    )

    # Record the outcome in the system log, mirroring the setup status.
    level = 'ERROR' if result.get('status') == 'error' else 'INFO'
    DatabaseManager(db).log_system_event(
        component='n8n',
        level=level,
        message=result.get('message', json.dumps(result)),
    )
    return result
|
||||||
|
|
||||||
|
|
||||||
@app.post('/init-db')
def initialize_database():
    """Initialize database tables (POST endpoint for NiceGUI to call before dashboard)."""
    try:
        # Delegate to the module-level initializer so the shared engine/metadata are used.
        database_module.init_db()
        return {'message': 'Database tables created successfully', 'status': 'success'}
    except Exception as e:
        # Report failure in the payload (not as an HTTP error) so the caller can render it.
        return {'message': f'Error initializing database: {str(e)}', 'status': 'error'}
|
||||||
|
|||||||
@@ -10,7 +10,10 @@ from sqlalchemy import (
|
|||||||
)
|
)
|
||||||
from sqlalchemy.orm import relationship, declarative_base
|
from sqlalchemy.orm import relationship, declarative_base
|
||||||
|
|
||||||
from config import settings
|
try:
|
||||||
|
from .config import settings
|
||||||
|
except ImportError:
|
||||||
|
from config import settings
|
||||||
|
|
||||||
Base = declarative_base()
|
Base = declarative_base()
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -52,6 +55,7 @@ class ProjectHistory(Base):
|
|||||||
ui_snapshots = relationship("UISnapshot", back_populates="project_history", cascade="all, delete-orphan")
|
ui_snapshots = relationship("UISnapshot", back_populates="project_history", cascade="all, delete-orphan")
|
||||||
pull_requests = relationship("PullRequest", back_populates="project_history", cascade="all, delete-orphan")
|
pull_requests = relationship("PullRequest", back_populates="project_history", cascade="all, delete-orphan")
|
||||||
pull_request_data = relationship("PullRequestData", back_populates="project_history", cascade="all, delete-orphan")
|
pull_request_data = relationship("PullRequestData", back_populates="project_history", cascade="all, delete-orphan")
|
||||||
|
prompt_code_links = relationship("PromptCodeLink", back_populates="project_history", cascade="all, delete-orphan")
|
||||||
|
|
||||||
|
|
||||||
class ProjectLog(Base):
|
class ProjectLog(Base):
|
||||||
@@ -145,6 +149,22 @@ class AuditTrail(Base):
|
|||||||
metadata_json = Column(JSON, nullable=True)
|
metadata_json = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
|
||||||
|
class PromptCodeLink(Base):
    """Explicit lineage between a prompt event and a resulting code change."""
    __tablename__ = "prompt_code_links"

    id = Column(Integer, primary_key=True)
    # Owning project-history row; rows are cascade-deleted with the project
    # (see the prompt_code_links relationship on ProjectHistory).
    history_id = Column(Integer, ForeignKey("project_history.id"), nullable=False)
    # Denormalized project identifier for project-scoped queries.
    project_id = Column(String(255), nullable=False)
    # IDs into the prompt / code-change audit tables; plain integers with no
    # FK constraint — presumably resolved in application code. TODO confirm.
    prompt_audit_id = Column(Integer, nullable=False)
    code_change_audit_id = Column(Integer, nullable=False)
    # Optional detail about which file changed and how.
    file_path = Column(String(500), nullable=True)
    change_type = Column(String(50), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow)

    project_history = relationship("ProjectHistory", back_populates="prompt_code_links")
|
||||||
|
|
||||||
|
|
||||||
class UserAction(Base):
|
class UserAction(Base):
|
||||||
"""User action audit entries."""
|
"""User action audit entries."""
|
||||||
__tablename__ = "user_actions"
|
__tablename__ = "user_actions"
|
||||||
|
|||||||
@@ -16,3 +16,6 @@ flake8==6.1.0
|
|||||||
mypy==1.7.1
|
mypy==1.7.1
|
||||||
httpx==0.25.2
|
httpx==0.25.2
|
||||||
nicegui==3.9.0
|
nicegui==3.9.0
|
||||||
|
aiohttp>=3.9.0
|
||||||
|
pytest-asyncio>=0.23.0
|
||||||
|
alembic>=1.14.0
|
||||||
@@ -1,385 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
|
||||||
<title>AI Software Factory Dashboard</title>
|
|
||||||
<style>
|
|
||||||
* {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
box-sizing: border-box;
|
|
||||||
}
|
|
||||||
|
|
||||||
body {
|
|
||||||
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
|
||||||
background: linear-gradient(135deg, #1a1a2e 0%, #16213e 100%);
|
|
||||||
min-height: 100vh;
|
|
||||||
color: #fff;
|
|
||||||
padding: 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dashboard {
|
|
||||||
max-width: 1200px;
|
|
||||||
margin: 0 auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header {
|
|
||||||
text-align: center;
|
|
||||||
padding: 30px;
|
|
||||||
background: rgba(255, 255, 255, 0.05);
|
|
||||||
border-radius: 15px;
|
|
||||||
margin-bottom: 20px;
|
|
||||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
|
||||||
}
|
|
||||||
|
|
||||||
.header h1 {
|
|
||||||
font-size: 2.5em;
|
|
||||||
margin-bottom: 10px;
|
|
||||||
background: linear-gradient(90deg, #00d4ff, #00ff88);
|
|
||||||
-webkit-background-clip: text;
|
|
||||||
-webkit-text-fill-color: transparent;
|
|
||||||
background-clip: text;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header p {
|
|
||||||
color: #888;
|
|
||||||
font-size: 1.1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stats-grid {
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
|
|
||||||
gap: 20px;
|
|
||||||
margin-bottom: 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-card {
|
|
||||||
background: rgba(255, 255, 255, 0.05);
|
|
||||||
border-radius: 15px;
|
|
||||||
padding: 25px;
|
|
||||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
|
||||||
text-align: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-card h3 {
|
|
||||||
font-size: 0.9em;
|
|
||||||
color: #888;
|
|
||||||
margin-bottom: 10px;
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 1px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-card .value {
|
|
||||||
font-size: 2.5em;
|
|
||||||
font-weight: bold;
|
|
||||||
color: #00d4ff;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-card.project .value { color: #00ff88; }
|
|
||||||
.stat-card.active .value { color: #ff6b6b; }
|
|
||||||
.stat-card.code .value { color: #ffd93d; }
|
|
||||||
|
|
||||||
.status-panel {
|
|
||||||
background: rgba(255, 255, 255, 0.05);
|
|
||||||
border-radius: 15px;
|
|
||||||
padding: 25px;
|
|
||||||
margin-bottom: 20px;
|
|
||||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-panel h2 {
|
|
||||||
font-size: 1.3em;
|
|
||||||
margin-bottom: 15px;
|
|
||||||
color: #00d4ff;
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-bar {
|
|
||||||
height: 20px;
|
|
||||||
background: #2a2a4a;
|
|
||||||
border-radius: 10px;
|
|
||||||
overflow: hidden;
|
|
||||||
margin-bottom: 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-fill {
|
|
||||||
height: 100%;
|
|
||||||
background: linear-gradient(90deg, #00d4ff, #00ff88);
|
|
||||||
border-radius: 10px;
|
|
||||||
transition: width 0.5s ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.message {
|
|
||||||
padding: 10px;
|
|
||||||
background: rgba(0, 212, 255, 0.1);
|
|
||||||
border-radius: 8px;
|
|
||||||
border-left: 4px solid #00d4ff;
|
|
||||||
}
|
|
||||||
|
|
||||||
.projects-section {
|
|
||||||
background: rgba(255, 255, 255, 0.05);
|
|
||||||
border-radius: 15px;
|
|
||||||
padding: 25px;
|
|
||||||
margin-bottom: 20px;
|
|
||||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
|
||||||
}
|
|
||||||
|
|
||||||
.projects-section h2 {
|
|
||||||
font-size: 1.3em;
|
|
||||||
margin-bottom: 15px;
|
|
||||||
color: #00ff88;
|
|
||||||
}
|
|
||||||
|
|
||||||
.projects-list {
|
|
||||||
display: flex;
|
|
||||||
flex-wrap: wrap;
|
|
||||||
gap: 15px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.project-item {
|
|
||||||
background: rgba(0, 255, 136, 0.1);
|
|
||||||
padding: 15px 20px;
|
|
||||||
border-radius: 10px;
|
|
||||||
border: 1px solid rgba(0, 255, 136, 0.3);
|
|
||||||
font-size: 0.9em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.project-item.active {
|
|
||||||
background: rgba(255, 107, 107, 0.1);
|
|
||||||
border-color: rgba(255, 107, 107, 0.3);
|
|
||||||
}
|
|
||||||
|
|
||||||
.audit-section {
|
|
||||||
background: rgba(255, 255, 255, 0.05);
|
|
||||||
border-radius: 15px;
|
|
||||||
padding: 25px;
|
|
||||||
margin-bottom: 20px;
|
|
||||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
|
||||||
}
|
|
||||||
|
|
||||||
.audit-section h2 {
|
|
||||||
font-size: 1.3em;
|
|
||||||
margin-bottom: 15px;
|
|
||||||
color: #ffd93d;
|
|
||||||
}
|
|
||||||
|
|
||||||
.audit-table {
|
|
||||||
width: 100%;
|
|
||||||
border-collapse: collapse;
|
|
||||||
margin-top: 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.audit-table th, .audit-table td {
|
|
||||||
padding: 12px;
|
|
||||||
text-align: left;
|
|
||||||
border-bottom: 1px solid rgba(255, 255, 255, 0.1);
|
|
||||||
}
|
|
||||||
|
|
||||||
.audit-table th {
|
|
||||||
color: #888;
|
|
||||||
font-weight: 600;
|
|
||||||
font-size: 0.85em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.audit-table td {
|
|
||||||
font-size: 0.9em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.audit-table .timestamp {
|
|
||||||
color: #666;
|
|
||||||
font-size: 0.8em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.actions-panel {
|
|
||||||
background: rgba(255, 255, 255, 0.05);
|
|
||||||
border-radius: 15px;
|
|
||||||
padding: 25px;
|
|
||||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
|
||||||
text-align: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.actions-panel h2 {
|
|
||||||
font-size: 1.3em;
|
|
||||||
margin-bottom: 15px;
|
|
||||||
color: #ff6b6b;
|
|
||||||
}
|
|
||||||
|
|
||||||
.actions-panel p {
|
|
||||||
color: #888;
|
|
||||||
margin-bottom: 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.loading {
|
|
||||||
text-align: center;
|
|
||||||
padding: 50px;
|
|
||||||
color: #888;
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (max-width: 768px) {
|
|
||||||
.stats-grid {
|
|
||||||
grid-template-columns: 1fr;
|
|
||||||
}
|
|
||||||
|
|
||||||
.projects-list {
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<div class="dashboard">
|
|
||||||
<div class="header">
|
|
||||||
<h1>🚀 AI Software Factory</h1>
|
|
||||||
<p>Real-time Dashboard & Audit Trail Display</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="stats-grid">
|
|
||||||
<div class="stat-card project">
|
|
||||||
<h3>Current Project</h3>
|
|
||||||
<div class="value" id="project-name">Loading...</div>
|
|
||||||
</div>
|
|
||||||
<div class="stat-card active">
|
|
||||||
<h3>Active Projects</h3>
|
|
||||||
<div class="value" id="active-projects">0</div>
|
|
||||||
</div>
|
|
||||||
<div class="stat-card code">
|
|
||||||
<h3>Total Projects</h3>
|
|
||||||
<div class="value" id="total-projects">0</div>
|
|
||||||
</div>
|
|
||||||
<div class="stat-card">
|
|
||||||
<h3>Status</h3>
|
|
||||||
<div class="value" id="status-value">Loading...</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="status-panel">
|
|
||||||
<h2>📊 Current Status</h2>
|
|
||||||
<div class="status-bar">
|
|
||||||
<div class="status-fill" id="status-fill" style="width: 0%"></div>
|
|
||||||
</div>
|
|
||||||
<div class="message" id="status-message">Loading...</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="projects-section">
|
|
||||||
<h2>📁 Active Projects</h2>
|
|
||||||
<div class="projects-list" id="projects-list">
|
|
||||||
<div class="loading">Loading projects...</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="audit-section">
|
|
||||||
<h2>📜 Audit Trail</h2>
|
|
||||||
<table class="audit-table">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>Timestamp</th>
|
|
||||||
<th>Agent</th>
|
|
||||||
<th>Action</th>
|
|
||||||
<th>Status</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody id="audit-trail-body">
|
|
||||||
<tr>
|
|
||||||
<td class="timestamp">Loading...</td>
|
|
||||||
<td>-</td>
|
|
||||||
<td>-</td>
|
|
||||||
<td>-</td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="actions-panel">
|
|
||||||
<h2>⚙️ System Actions</h2>
|
|
||||||
<p id="actions-message">Dashboard is rendering successfully.</p>
|
|
||||||
<p style="color: #888; font-size: 0.9em;">This dashboard is powered by the AI Software Factory and displays real-time status updates, audit trails, and project information.</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<script>
|
|
||||||
// Fetch data from API
|
|
||||||
// Fetch projects and the audit trail from the API and refresh every dashboard
// panel (stat cards, status bar, projects list, audit table).
// Called once on page load; any failure is reported in the status panel.
async function loadDashboardData() {
    // Escape API-supplied text before interpolating into innerHTML (XSS guard).
    const esc = (value) => String(value).replace(/[&<>"']/g, (ch) => ({
        '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;',
    }[ch]));

    try {
        // Load the project list and update the stat cards.
        const projectsResponse = await fetch('/projects');
        const projectsData = await projectsResponse.json();
        updateProjects(projectsData.projects);

        // Always render the projects list. (Previously it was only rendered
        // when no project was active, so the "Loading projects..." placeholder
        // never went away while a project was running.)
        document.getElementById('projects-list').innerHTML = projectsData.projects.map((p) =>
            `<div class="project-item ${p.status === 'RUNNING' || p.status === 'IN_PROGRESS' ? 'active' : ''}">
                <strong>${esc(p.project_name || p.project_id)}</strong> • ${esc(p.status)} • ${p.progress || 0}%
            </div>`
        ).join('');

        // Highlight the currently running project, if any.
        const activeProject = projectsData.projects.find(
            (p) => p.status === 'RUNNING' || p.status === 'IN_PROGRESS'
        );
        if (activeProject) {
            document.getElementById('project-name').textContent =
                activeProject.project_name || activeProject.project_id;
            updateStatusPanel(activeProject);
        }

        // Load the audit trail regardless of whether a project is active.
        // (Previously it was only fetched for an active project, leaving the
        // stale "Loading..." row in the table otherwise.)
        const auditResponse = await fetch('/audit/trail?limit=10');
        const auditData = await auditResponse.json();
        updateAuditTrail(auditData.audit_trail);
    } catch (error) {
        console.error('Error loading dashboard data:', error);
        document.getElementById('status-message').innerHTML =
            `<strong>Error:</strong> Failed to load dashboard data. Please check the console for details.`;
    }
}
|
|
||||||
|
|
||||||
// Refresh the "Active Projects" and "Total Projects" stat cards from the
// project list returned by /projects.
// "Active" means RUNNING or IN_PROGRESS — the same criteria the rest of the
// dashboard uses; the original also counted COMPLETED projects as active,
// contradicting the card's label.
function updateProjects(projects) {
    const activeProjects = projects.filter(
        (p) => p.status === 'RUNNING' || p.status === 'IN_PROGRESS'
    ).length;
    document.getElementById('active-projects').textContent = activeProjects;
    document.getElementById('total-projects').textContent = projects.length;
}
|
|
||||||
|
|
||||||
// Update the progress bar, status message, and "Status" stat card for the
// given project object ({ progress, message, status } as returned by the API).
function updateStatusPanel(project) {
    // Escape API-supplied text before interpolating into innerHTML (XSS guard).
    const esc = (value) => String(value).replace(/[&<>"']/g, (ch) => ({
        '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;',
    }[ch]));

    const progress = project.progress || 0;
    document.getElementById('status-fill').style.width = progress + '%';
    document.getElementById('status-message').innerHTML =
        `<strong>${esc(project.message || 'Project running...')}</strong><br>` +
        `<span style="color: #888;">Progress: ${progress}%</span>`;
    document.getElementById('status-value').textContent = project.status;
}
|
|
||||||
|
|
||||||
// Render the audit-trail table body from an array of audit entries.
// Each entry may carry { timestamp, actor, action, details, action_type, status };
// missing fields render as '-'.
function updateAuditTrail(auditEntries) {
    const body = document.getElementById('audit-trail-body');

    // Robustness: treat a missing array the same as an empty one.
    if (!auditEntries || auditEntries.length === 0) {
        body.innerHTML =
            `<tr><td colspan="4" style="text-align: center; color: #888;">No audit entries yet</td></tr>`;
        return;
    }

    // Escape API-supplied text before interpolating into innerHTML (XSS guard).
    const esc = (value) => String(value).replace(/[&<>"']/g, (ch) => ({
        '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;',
    }[ch]));

    body.innerHTML = auditEntries.map((entry) => {
        const timestamp = entry.timestamp ? new Date(entry.timestamp).toLocaleString() : '-';
        const statusText = entry.action_type || entry.status;
        return `
            <tr>
                <td class="timestamp">${esc(timestamp)}</td>
                <td>${esc(entry.actor || '-')}</td>
                <td>${esc(entry.action || entry.details || '-')}</td>
                <td style="color: ${getStatusColor(statusText)};">${esc(statusText || '-')}</td>
            </tr>
        `;
    }).join('');
}
|
|
||||||
|
|
||||||
// Map an audit/status string (case-insensitive) to the dashboard accent color.
// Unknown, empty, or missing statuses fall back to neutral grey.
function getStatusColor(status) {
    if (!status) return '#888';
    const upper = status.toUpperCase();
    if (['SUCCESS', 'COMPLETED', 'FINISHED'].includes(upper)) return '#00ff88';
    if (['IN_PROGRESS', 'RUNNING', 'PENDING'].includes(upper)) return '#00d4ff';
    // The original listed 'FAILED' twice in this array; deduplicated.
    if (['ERROR', 'FAILED'].includes(upper)) return '#ff6b6b';
    return '#888';
}
|
|
||||||
|
|
||||||
// Load data when dashboard is ready
|
|
||||||
// Initial render: populate stat cards, status panel, projects list, and audit
// trail as soon as the script runs (the async function handles its own errors).
loadDashboardData();
|
|
||||||
</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
Reference in New Issue
Block a user