feat: add explicit workflow steps and guardrail prompts, refs NOISSUE
This commit is contained in:
@@ -30,6 +30,7 @@ try:
|
||||
from .agents.change_summary import ChangeSummaryGenerator
|
||||
from .agents.database_manager import DatabaseManager
|
||||
from .agents.request_interpreter import RequestInterpreter
|
||||
from .agents.llm_service import LLMServiceClient
|
||||
from .agents.orchestrator import AgentOrchestrator
|
||||
from .agents.n8n_setup import N8NSetupAgent
|
||||
from .agents.prompt_workflow import PromptWorkflowManager
|
||||
@@ -41,6 +42,7 @@ except ImportError:
|
||||
from agents.change_summary import ChangeSummaryGenerator
|
||||
from agents.database_manager import DatabaseManager
|
||||
from agents.request_interpreter import RequestInterpreter
|
||||
from agents.llm_service import LLMServiceClient
|
||||
from agents.orchestrator import AgentOrchestrator
|
||||
from agents.n8n_setup import N8NSetupAgent
|
||||
from agents.prompt_workflow import PromptWorkflowManager
|
||||
@@ -109,6 +111,75 @@ def _build_project_id(name: str) -> str:
|
||||
return f"{slug}-{uuid4().hex[:8]}"
|
||||
|
||||
|
||||
def _build_project_slug(name: str) -> str:
    """Turn *name* into a lowercase kebab-case slug, defaulting to "project".

    Characters matched by ``PROJECT_ID_PATTERN`` are replaced with hyphens,
    then surrounding hyphens are trimmed; an empty result yields "project".
    """
    cleaned = name.strip().lower()
    hyphenated = PROJECT_ID_PATTERN.sub("-", cleaned)
    slug = hyphenated.strip("-")
    return slug if slug else "project"
|
||||
|
||||
|
||||
def _ensure_unique_identifier(base_slug: str, reserved_ids: set[str]) -> str:
    """Return a unique identifier, appending "-2", "-3", ... on collision.

    The input is first normalized via ``_build_project_slug``; when the
    normalized form is already reserved, the smallest numeric suffix
    (starting at 2) that produces an unreserved id is appended.
    """
    candidate = _build_project_slug(base_slug)
    if candidate in reserved_ids:
        counter = 2
        suffixed = f"{candidate}-{counter}"
        while suffixed in reserved_ids:
            counter += 1
            suffixed = f"{candidate}-{counter}"
        return suffixed
    return candidate
|
||||
|
||||
|
||||
def _build_project_identity_context(manager: DatabaseManager) -> list[dict]:
    """Build a compact project catalog (id, name, description, repo) for naming stages."""

    def _catalog_entry(history) -> dict:
        # NOTE(review): reaches into DatabaseManager's private repository helper;
        # consider promoting it to a public accessor.
        repository = manager._get_project_repository(history) or {}
        return {
            'project_id': history.project_id,
            'name': history.project_name,
            'description': history.description,
            'repository': {
                'owner': repository.get('owner'),
                'name': repository.get('name'),
            },
        }

    return [
        _catalog_entry(history)
        for history in manager.get_all_projects(include_archived=True)
    ]
|
||||
|
||||
|
||||
async def _derive_project_id_for_request(
    request: SoftwareRequest,
    *,
    prompt_text: str,
    prompt_routing: dict | None,
    existing_projects: list[dict],
) -> tuple[str, dict | None]:
    """Derive a stable project id for a newly created project.

    Asks the LLM naming stage for a suggestion and de-duplicates it against
    ids already in use; falls back to a deterministic slug derived from the
    routing hint (or the request name) when the LLM response is missing,
    malformed, or not a JSON object.

    Args:
        request: Structured software request being materialized.
        prompt_text: Original free-form user prompt.
        prompt_routing: Optional routing metadata from prompt interpretation.
        existing_projects: Catalog entries, each ideally carrying 'project_id'.

    Returns:
        Tuple of (unique project id, LLM trace dict or ``None``).
    """
    reserved_ids = {
        str(project.get('project_id')).strip()
        for project in existing_projects
        if project.get('project_id')
    }
    # Deterministic fallback used whenever the LLM yields nothing usable.
    fallback_id = _ensure_unique_identifier(
        (prompt_routing or {}).get('project_name') or request.name,
        reserved_ids,
    )
    user_prompt = (
        f"Original user prompt:\n{prompt_text}\n\n"
        f"Structured request:\n{json.dumps({'name': request.name, 'description': request.description, 'features': request.features, 'tech_stack': request.tech_stack}, indent=2)}\n\n"
        f"Naming context:\n{json.dumps(prompt_routing or {}, indent=2)}\n\n"
        f"Reserved project ids:\n{json.dumps(sorted(reserved_ids))}\n\n"
        "Suggest the best stable project id for this new project."
    )
    content, trace = await LLMServiceClient().chat_with_trace(
        stage='project_id_naming',
        system_prompt=database_module.settings.llm_project_id_system_prompt,
        user_prompt=user_prompt,
        tool_context_input={'projects': existing_projects},
        expect_json=True,
    )
    if content:
        # Keep the try body minimal and the exception narrow: only JSON
        # decoding failures are expected here (ValueError covers
        # json.JSONDecodeError); anything else should surface as a bug.
        try:
            parsed = json.loads(content)
        except ValueError:
            parsed = None
        if isinstance(parsed, dict):
            candidate = parsed.get('project_id') or parsed.get('slug') or request.name
            return _ensure_unique_identifier(str(candidate), reserved_ids), trace
    return fallback_id, trace
|
||||
|
||||
|
||||
def _serialize_project(history: ProjectHistory) -> dict:
|
||||
"""Serialize a project history row for API responses."""
|
||||
return {
|
||||
@@ -176,13 +247,15 @@ async def _run_generation(
|
||||
prompt_source_context: dict | None = None,
|
||||
prompt_routing: dict | None = None,
|
||||
preferred_project_id: str | None = None,
|
||||
repo_name_override: str | None = None,
|
||||
related_issue: dict | None = None,
|
||||
) -> dict:
|
||||
"""Run the shared generation pipeline for a structured request."""
|
||||
database_module.init_db()
|
||||
|
||||
manager = DatabaseManager(db)
|
||||
reusable_history = manager.get_project_by_id(preferred_project_id, include_archived=False) if preferred_project_id else manager.get_latest_project_by_name(request.name)
|
||||
is_explicit_new_project = (prompt_routing or {}).get('intent') == 'new_project'
|
||||
reusable_history = manager.get_project_by_id(preferred_project_id, include_archived=False) if preferred_project_id else (None if is_explicit_new_project else manager.get_latest_project_by_name(request.name))
|
||||
if reusable_history and database_module.settings.gitea_url and database_module.settings.gitea_token:
|
||||
try:
|
||||
from .agents.gitea import GiteaAPI
|
||||
@@ -197,14 +270,23 @@ async def _run_generation(
|
||||
),
|
||||
project_id=reusable_history.project_id,
|
||||
)
|
||||
project_id_trace = None
|
||||
resolved_prompt_text = prompt_text or _compose_prompt_text(request)
|
||||
if preferred_project_id and reusable_history is not None:
|
||||
project_id = reusable_history.project_id
|
||||
elif reusable_history and manager.get_open_pull_request(project_id=reusable_history.project_id):
|
||||
elif reusable_history and not is_explicit_new_project and manager.get_open_pull_request(project_id=reusable_history.project_id):
|
||||
project_id = reusable_history.project_id
|
||||
else:
|
||||
project_id = _build_project_id(request.name)
|
||||
if is_explicit_new_project or prompt_text:
|
||||
project_id, project_id_trace = await _derive_project_id_for_request(
|
||||
request,
|
||||
prompt_text=resolved_prompt_text,
|
||||
prompt_routing=prompt_routing,
|
||||
existing_projects=_build_project_identity_context(manager),
|
||||
)
|
||||
else:
|
||||
project_id = _build_project_id(request.name)
|
||||
reusable_history = None
|
||||
resolved_prompt_text = prompt_text or _compose_prompt_text(request)
|
||||
orchestrator = AgentOrchestrator(
|
||||
project_id=project_id,
|
||||
project_name=request.name,
|
||||
@@ -217,6 +299,7 @@ async def _run_generation(
|
||||
existing_history=reusable_history,
|
||||
prompt_source_context=prompt_source_context,
|
||||
prompt_routing=prompt_routing,
|
||||
repo_name_override=repo_name_override,
|
||||
related_issue_hint=related_issue,
|
||||
)
|
||||
result = await orchestrator.run()
|
||||
@@ -240,6 +323,20 @@ async def _run_generation(
|
||||
response_data['repository'] = result.get('repository')
|
||||
response_data['related_issue'] = result.get('related_issue') or (result.get('ui_data') or {}).get('related_issue')
|
||||
response_data['pull_request'] = result.get('pull_request') or manager.get_open_pull_request(project_id=project_id)
|
||||
if project_id_trace:
|
||||
manager.log_llm_trace(
|
||||
project_id=project_id,
|
||||
history_id=history.id if history else None,
|
||||
prompt_id=orchestrator.prompt_audit.id if orchestrator.prompt_audit else None,
|
||||
stage=project_id_trace['stage'],
|
||||
provider=project_id_trace['provider'],
|
||||
model=project_id_trace['model'],
|
||||
system_prompt=project_id_trace['system_prompt'],
|
||||
user_prompt=project_id_trace['user_prompt'],
|
||||
assistant_response=project_id_trace['assistant_response'],
|
||||
raw_response=project_id_trace.get('raw_response'),
|
||||
fallback_used=project_id_trace.get('fallback_used', False),
|
||||
)
|
||||
summary_context = {
|
||||
'name': response_data['name'],
|
||||
'description': response_data['description'],
|
||||
@@ -322,6 +419,7 @@ def read_api_info():
|
||||
'/',
|
||||
'/api',
|
||||
'/health',
|
||||
'/llm/runtime',
|
||||
'/generate',
|
||||
'/generate/text',
|
||||
'/projects',
|
||||
@@ -363,6 +461,12 @@ def health_check():
|
||||
}
|
||||
|
||||
|
||||
@app.get('/llm/runtime')
def get_llm_runtime():
    """Expose the active external LLM runtime, guardrail, and tool configuration."""
    client = LLMServiceClient()
    return client.get_runtime_configuration()
|
||||
|
||||
|
||||
@app.post('/generate')
|
||||
async def generate_software(request: SoftwareRequest, db: DbSession):
|
||||
"""Create and record a software-generation request."""
|
||||
@@ -411,6 +515,7 @@ async def generate_software_from_text(request: FreeformSoftwareRequest, db: DbSe
|
||||
},
|
||||
prompt_routing=routing,
|
||||
preferred_project_id=routing.get('project_id') if routing.get('intent') != 'new_project' else None,
|
||||
repo_name_override=routing.get('repo_name') if routing.get('intent') == 'new_project' else None,
|
||||
related_issue={'number': routing.get('issue_number')} if routing.get('issue_number') is not None else None,
|
||||
)
|
||||
project_data = response.get('data', {})
|
||||
@@ -431,6 +536,21 @@ async def generate_software_from_text(request: FreeformSoftwareRequest, db: DbSe
|
||||
raw_response=interpretation_trace.get('raw_response'),
|
||||
fallback_used=interpretation_trace.get('fallback_used', False),
|
||||
)
|
||||
naming_trace = interpretation_trace.get('project_naming')
|
||||
if naming_trace:
|
||||
manager.log_llm_trace(
|
||||
project_id=project_data.get('project_id'),
|
||||
history_id=project_data.get('history_id'),
|
||||
prompt_id=prompt_id,
|
||||
stage=naming_trace['stage'],
|
||||
provider=naming_trace['provider'],
|
||||
model=naming_trace['model'],
|
||||
system_prompt=naming_trace['system_prompt'],
|
||||
user_prompt=naming_trace['user_prompt'],
|
||||
assistant_response=naming_trace['assistant_response'],
|
||||
raw_response=naming_trace.get('raw_response'),
|
||||
fallback_used=naming_trace.get('fallback_used', False),
|
||||
)
|
||||
response['interpreted_request'] = interpreted
|
||||
response['routing'] = routing
|
||||
response['llm_trace'] = interpretation_trace
|
||||
|
||||
Reference in New Issue
Block a user