Compare commits
65 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e495775b91 | |||
| 356c388efb | |||
| fd812476cc | |||
| 032139c14f | |||
| 194d5658a6 | |||
| b9faac8d16 | |||
| 80d7716e65 | |||
| 321bf74aef | |||
| 55ee75106c | |||
| b2829caa02 | |||
| d4b280cf75 | |||
| 806db8537b | |||
| 360ed5c6f3 | |||
| 4b9eb2f359 | |||
| ebfcfb969a | |||
| 56b05eb686 | |||
| 59a7e9787e | |||
| a357a307a7 | |||
| af4247e657 | |||
| 227ad1ad6f | |||
| 82e53a6651 | |||
| e9dc1ede55 | |||
| 6ee1c46826 | |||
| 4f5c87bed9 | |||
| 7180031d1f | |||
| de4feb61cd | |||
| ddb9f2100b | |||
| 034bb3eb63 | |||
| 06a50880b7 | |||
| c66b57f9cb | |||
| ba30f84f49 | |||
| 81935daaf5 | |||
| d2260ac797 | |||
| ca6f39a3e8 | |||
| 5eb5bd426a | |||
| 08af3ed38d | |||
| cc5060d317 | |||
| c51e51c9c2 | |||
| f0ec9169c4 | |||
| 9615c50ccb | |||
| 9fcf2e2d1a | |||
| 67df87072d | |||
| ef249dfbe6 | |||
| 8bbbf6b9ac | |||
| 7f12034bff | |||
| 4430348168 | |||
| 578be7b6f4 | |||
| dbcd3fba91 | |||
| 0eb0bc0d41 | |||
| a73644b1da | |||
| 4c7a089753 | |||
| 4d70a98902 | |||
| f65f0b3603 | |||
| fec96cd049 | |||
| 25b180a2f3 | |||
| 45bcbfe80d | |||
| d82b811e55 | |||
| b10c34f3fc | |||
| f7b8925881 | |||
| 78c8bd68cc | |||
| f17e241871 | |||
| 55c5fca784 | |||
| aa0ca2cb7b | |||
| e824475872 | |||
|
|
0b1384279d |
@@ -46,7 +46,7 @@ create_file() {
|
||||
}
|
||||
|
||||
get_commit_range() {
|
||||
rm $TEMP_FILE_PATH/messages.txt
|
||||
rm -f $TEMP_FILE_PATH/messages.txt
|
||||
if [[ $LAST_TAG =~ $PATTERN ]]; then
|
||||
create_file true
|
||||
else
|
||||
@@ -86,8 +86,8 @@ start() {
|
||||
echo "New version: $new_version"
|
||||
|
||||
gitchangelog | grep -v "[rR]elease:" > HISTORY.md
|
||||
echo $new_version > project_name/VERSION
|
||||
git add project_name/VERSION HISTORY.md
|
||||
echo $new_version > ai_software_factory/VERSION
|
||||
git add ai_software_factory/VERSION HISTORY.md
|
||||
git commit -m "release: version $new_version 🚀"
|
||||
echo "creating git tag : $new_version"
|
||||
git tag $new_version
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
while getopts a:n:u:d: flag
|
||||
do
|
||||
case "${flag}" in
|
||||
a) author=${OPTARG};;
|
||||
n) name=${OPTARG};;
|
||||
u) urlname=${OPTARG};;
|
||||
d) description=${OPTARG};;
|
||||
esac
|
||||
done
|
||||
|
||||
echo "Author: $author";
|
||||
echo "Project Name: $name";
|
||||
echo "Project URL name: $urlname";
|
||||
echo "Description: $description";
|
||||
|
||||
echo "Renaming project..."
|
||||
|
||||
original_author="author_name"
|
||||
original_name="project_name"
|
||||
original_urlname="project_urlname"
|
||||
original_description="project_description"
|
||||
# for filename in $(find . -name "*.*")
|
||||
for filename in $(git ls-files)
|
||||
do
|
||||
sed -i "s/$original_author/$author/g" $filename
|
||||
sed -i "s/$original_name/$name/g" $filename
|
||||
sed -i "s/$original_urlname/$urlname/g" $filename
|
||||
sed -i "s/$original_description/$description/g" $filename
|
||||
echo "Renamed $filename"
|
||||
done
|
||||
|
||||
mv project_name $name
|
||||
|
||||
# This command runs only once on GHA!
|
||||
rm -rf .gitea/template.yml
|
||||
rm -rf project_name
|
||||
rm -rf project_name.Tests
|
||||
@@ -1 +0,0 @@
|
||||
author: rochacbruno
|
||||
@@ -4,6 +4,7 @@ permissions:
|
||||
|
||||
env:
|
||||
SKIP_MAKE_SETUP_CHECK: 'true'
|
||||
DOCKER_API_VERSION: '1.43'
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -41,7 +42,7 @@ jobs:
|
||||
- name: Check version match
|
||||
run: |
|
||||
REPOSITORY_NAME=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $2}' | tr '-' '_')
|
||||
if [ "$(cat project_name/VERSION)" = "${GITHUB_REF_NAME}" ] ; then
|
||||
if [ "$(cat ai_software_factory/VERSION)" = "${GITHUB_REF_NAME}" ] ; then
|
||||
echo "Version matches successfully!"
|
||||
else
|
||||
echo "Version must match!"
|
||||
@@ -49,13 +50,17 @@ jobs:
|
||||
fi
|
||||
- name: Login to Gitea container registry
|
||||
uses: docker/login-action@v3
|
||||
env:
|
||||
DOCKER_API_VERSION: ${{ env.DOCKER_API_VERSION }}
|
||||
with:
|
||||
username: gitearobot
|
||||
password: ${{ secrets.PACKAGE_GITEA_PAT }}
|
||||
registry: git.disi.dev
|
||||
- name: Build and publish
|
||||
env:
|
||||
DOCKER_API_VERSION: ${{ env.DOCKER_API_VERSION }}
|
||||
run: |
|
||||
REPOSITORY_OWNER=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $1}' | tr '[:upper:]' '[:lower:]')
|
||||
REPOSITORY_NAME=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $2}' | tr '-' '_')
|
||||
docker build -t "git.disi.dev/$REPOSITORY_OWNER/project_name:$(cat project_name/VERSION)" -f Containerfile ./
|
||||
docker push "git.disi.dev/$REPOSITORY_OWNER/project_name:$(cat project_name/VERSION)"
|
||||
docker build -t "git.disi.dev/$REPOSITORY_OWNER/ai_software_factory:$(cat ai_software_factory/VERSION)" -f Containerfile ./
|
||||
docker push "git.disi.dev/$REPOSITORY_OWNER/ai_software_factory:$(cat ai_software_factory/VERSION)"
|
||||
@@ -1,48 +0,0 @@
|
||||
name: Rename the project from template
|
||||
|
||||
on: [push]
|
||||
|
||||
permissions: write-all
|
||||
|
||||
jobs:
|
||||
rename-project:
|
||||
if: ${{ !endsWith (gitea.repository, 'Templates/Docker_Image') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
# by default, it uses a depth of 1
|
||||
# this fetches all history so that we can read each commit
|
||||
fetch-depth: 0
|
||||
ref: ${{ gitea.head_ref }}
|
||||
|
||||
- run: echo "REPOSITORY_NAME=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $2}' | tr '-' '_')" >> $GITHUB_ENV
|
||||
shell: bash
|
||||
|
||||
- run: echo "REPOSITORY_URLNAME=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $2}')" >> $GITHUB_ENV
|
||||
shell: bash
|
||||
|
||||
- run: echo "REPOSITORY_OWNER=$(echo "$GITHUB_REPOSITORY" | awk -F '/' '{print $1}')" >> $GITHUB_ENV
|
||||
shell: bash
|
||||
|
||||
- name: Is this still a template
|
||||
id: is_template
|
||||
run: echo "::set-output name=is_template::$(ls .gitea/template.yml &> /dev/null && echo true || echo false)"
|
||||
|
||||
- name: Rename the project
|
||||
if: steps.is_template.outputs.is_template == 'true'
|
||||
run: |
|
||||
echo "Renaming the project with -a(author) ${{ env.REPOSITORY_OWNER }} -n(name) ${{ env.REPOSITORY_NAME }} -u(urlname) ${{ env.REPOSITORY_URLNAME }}"
|
||||
.gitea/rename_project.sh -a ${{ env.REPOSITORY_OWNER }} -n ${{ env.REPOSITORY_NAME }} -u ${{ env.REPOSITORY_URLNAME }} -d "Awesome ${{ env.REPOSITORY_NAME }} created by ${{ env.REPOSITORY_OWNER }}"
|
||||
|
||||
- name: Remove renaming workflow
|
||||
if: steps.is_template.outputs.is_template == 'true'
|
||||
run: |
|
||||
rm .gitea/workflows/rename_project.yml
|
||||
rm .gitea/rename_project.sh
|
||||
|
||||
- uses: stefanzweifel/git-auto-commit-action@v4
|
||||
with:
|
||||
commit_message: "✅ Ready to clone and code."
|
||||
# commit_options: '--amend --no-edit'
|
||||
push_options: --force
|
||||
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
sqlite.db
|
||||
.nicegui/
|
||||
@@ -1,15 +1,15 @@
|
||||
# How to develop on this project
|
||||
|
||||
project_name welcomes contributions from the community.
|
||||
ai_software_factory welcomes contributions from the community.
|
||||
|
||||
This instructions are for linux base systems. (Linux, MacOS, BSD, etc.)
|
||||
|
||||
## Setting up your own fork of this repo.
|
||||
|
||||
- On gitea interface click on `Fork` button.
|
||||
- Clone your fork of this repo. `git clone git@git.disi.dev:YOUR_GIT_USERNAME/project_urlname.git`
|
||||
- Enter the directory `cd project_urlname`
|
||||
- Add upstream repo `git remote add upstream https://git.disi.dev/author_name/project_urlname`
|
||||
- Clone your fork of this repo. `git clone git@git.disi.dev:YOUR_GIT_USERNAME/ai-test.git`
|
||||
- Enter the directory `cd ai-test`
|
||||
- Add upstream repo `git remote add upstream https://git.disi.dev/Projects/ai-test`
|
||||
- initialize repository for use `make setup`
|
||||
|
||||
## Install the project in develop mode
|
||||
|
||||
@@ -1,6 +1,43 @@
|
||||
FROM alpine
|
||||
# AI Software Factory Dockerfile
|
||||
FROM python:3.11-slim
|
||||
|
||||
# Set environment variables
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PIP_NO_CACHE_DIR=1 \
|
||||
PIP_DISABLE_PIP_VERSION_CHECK=1
|
||||
|
||||
# Set work directory
|
||||
WORKDIR /app
|
||||
COPY ./project_name/* /app
|
||||
|
||||
CMD ["sh", "/app/hello_world.sh"]
|
||||
# Install system dependencies
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install dependencies
|
||||
COPY ./ai_software_factory/requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Copy application code
|
||||
COPY ./ai_software_factory .
|
||||
|
||||
# Set up environment file if it exists, otherwise use .env.example
|
||||
# RUN if [ -f .env ]; then \
|
||||
# cat .env; \
|
||||
# elif [ -f .env.example ]; then \
|
||||
# cp .env.example .env; \
|
||||
# fi
|
||||
|
||||
# Initialize database tables (use SQLite by default, can be overridden by DB_POOL_SIZE env var)
|
||||
# RUN python database.py || true
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Run application
|
||||
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
|
||||
|
||||
290
HISTORY.md
290
HISTORY.md
@@ -4,6 +4,296 @@ Changelog
|
||||
|
||||
(unreleased)
|
||||
------------
|
||||
- Feat: gitea issue integration, refs NOISSUE. [Simon Diesenreiter]
|
||||
- Feat: better history data, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
|
||||
0.6.5 (2026-04-10)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Better n8n workflow, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.6.4 (2026-04-10)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Add Telegram helper functions, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.6.3 (2026-04-10)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- N8n workflow generation, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.6.2 (2026-04-10)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Fix Quasar layout issues, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.6.1 (2026-04-10)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Fix commit for version push, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- Chore: add more health info for n8n, refs NOISSUE. [Simon
|
||||
Diesenreiter]
|
||||
|
||||
|
||||
0.6.0 (2026-04-10)
|
||||
------------------
|
||||
- Feat(api): expose database target in health refs NOISSUE. [Simon
|
||||
Diesenreiter]
|
||||
- Fix(db): prefer postgres config in production refs NOISSUE. [Simon
|
||||
Diesenreiter]
|
||||
|
||||
|
||||
0.5.0 (2026-04-10)
|
||||
------------------
|
||||
- Feat(dashboard): expose repository urls refs NOISSUE. [Simon
|
||||
Diesenreiter]
|
||||
- Feat(factory): serve dashboard at root and create project repos refs
|
||||
NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
|
||||
0.4.1 (2026-04-10)
|
||||
------------------
|
||||
- Fix(ci): pin docker api version for release builds refs NOISSUE.
|
||||
[Simon Diesenreiter]
|
||||
|
||||
|
||||
0.4.0 (2026-04-10)
|
||||
------------------
|
||||
- Chore(git): ignore local sqlite database refs NOISSUE. [Simon
|
||||
Diesenreiter]
|
||||
- Feat(factory): implement db-backed dashboard and workflow automation
|
||||
refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
|
||||
0.3.6 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Rename gitea workflow, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.3.5 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Some cleanup, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.3.4 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Fix database init, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.3.3 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Fix runtime errors, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.3.2 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Add back DB init endpoints, ref NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.3.1 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Fix broken Docker build, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.3.0 (2026-04-04)
|
||||
------------------
|
||||
- Feat: dashboard via NiceGUI, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
|
||||
0.2.2 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Add missing jijna2 reference, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.2.1 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Make dashbaord work, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.2.0 (2026-04-04)
|
||||
------------------
|
||||
- Feat: Add Python-native dashboard and main.py cleanup, refs NOISSUE.
|
||||
[Simon Diesenreiter]
|
||||
|
||||
|
||||
0.1.8 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Broken python module references, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.1.7 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- More bugfixes, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.1.6 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Proper containerfile, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- Chore: update Containerfile to start the app instead of hello world
|
||||
refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
|
||||
0.1.5 (2026-04-04)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Bugfix in version generation, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- Feat(ai-software-factory): add n8n setup agent and enhance
|
||||
orchestration refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
|
||||
0.1.4 (2026-04-02)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Fix container build, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.1.3 (2026-04-02)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Fix version increment logic, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.1.2 (2026-04-02)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Test version increment logic, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.1.1 (2026-04-01)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Broken CI build, refs NOISSUE. [Simon Diesenreiter]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
|
||||
|
||||
0.1.0 (2026-04-01)
|
||||
------------------
|
||||
- Feat: initial release, refs NOISSUE. [Simon Diesenreiter]
|
||||
- ✅ Ready to clone and code. [simon]
|
||||
|
||||
|
||||
0.0.1 (2026-03-14)
|
||||
------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
|
||||
28
Makefile
28
Makefile
@@ -1,5 +1,7 @@
|
||||
.ONESHELL:
|
||||
|
||||
DOCKER_API_VERSION ?= 1.43
|
||||
|
||||
.PHONY: issetup
|
||||
issetup:
|
||||
@[ -f .git/hooks/commit-msg ] || [ -z ${SKIP_MAKE_SETUP_CHECK+x} ] || (echo "You must run 'make setup' first to initialize the repo!" && exit 1)
|
||||
@@ -17,26 +19,34 @@ help: ## Show the help.
|
||||
|
||||
.PHONY: fmt
|
||||
fmt: issetup ## Format code using black & isort.
|
||||
$(ENV_PREFIX)isort project_name/
|
||||
$(ENV_PREFIX)black -l 79 project_name/
|
||||
$(ENV_PREFIX)isort ai-software-factory/
|
||||
$(ENV_PREFIX)black -l 79 ai-software-factory/
|
||||
$(ENV_PREFIX)black -l 79 tests/
|
||||
|
||||
.PHONY: test
|
||||
test: issetup ## Run tests with pytest.
|
||||
$(ENV_PREFIX)pytest ai-software-factory/tests/ -v --tb=short
|
||||
|
||||
.PHONY: test-cov
|
||||
test-cov: issetup ## Run tests with coverage report.
|
||||
$(ENV_PREFIX)pytest ai-software-factory/tests/ -v --tb=short --cov=ai-software-factory --cov-report=html --cov-report=term-missing
|
||||
|
||||
.PHONY: lint
|
||||
lint: issetup ## Run pep8, black, mypy linters.
|
||||
$(ENV_PREFIX)flake8 project_name/
|
||||
$(ENV_PREFIX)black -l 79 --check project_name/
|
||||
$(ENV_PREFIX)flake8 ai-software-factory/
|
||||
$(ENV_PREFIX)black -l 79 --check ai-software-factory/
|
||||
$(ENV_PREFIX)black -l 79 --check tests/
|
||||
$(ENV_PREFIX)mypy --ignore-missing-imports project_name/
|
||||
$(ENV_PREFIX)mypy --ignore-missing-imports ai-software-factory/
|
||||
|
||||
.PHONY: release
|
||||
release: issetup ## Create a new tag for release.
|
||||
@./.gitea/conventional_commits/generate-version.sh
|
||||
|
||||
.PHONY: build
|
||||
build: issetup ## Create a new tag for release.
|
||||
@docker build -t project_name:$(cat project_name/VERSION) -f Containerfile .
|
||||
build: issetup ## Create a new tag for release.
|
||||
@DOCKER_API_VERSION=$(DOCKER_API_VERSION) docker build -t ai-software-factory:$(cat ai_software_factory/VERSION) -f Containerfile .
|
||||
|
||||
# This project has been generated from rochacbruno/python-project-template
|
||||
# __author__ = 'rochacbruno'
|
||||
#igest__ = 'rochacbruno'
|
||||
# __repo__ = https://github.com/rochacbruno/python-project-template
|
||||
# __sponsor__ = https://github.com/sponsors/rochacbruno/
|
||||
# __sponsor__ = https://github.com/sponsors/rochacbruno/
|
||||
|
||||
229
README.md
229
README.md
@@ -1,13 +1,232 @@
|
||||
# project_name
|
||||
# AI Software Factory
|
||||
|
||||
Project description goes here.
|
||||
Automated software generation service powered by Ollama LLM. This service allows users to specify via Telegram what kind of software they would like, and an agent hosted in Ollama will create it iteratively, testing it while building out the source code and committing to gitea.
|
||||
|
||||
## Usage
|
||||
## Features
|
||||
|
||||
- **Telegram Integration**: Receive software requests via Telegram bot
|
||||
- **Ollama LLM**: Uses Ollama-hosted models for code generation
|
||||
- **Git Integration**: Creates a dedicated Gitea repository per generated project inside your organization
|
||||
- **Pull Requests**: Creates PRs for user review before merging
|
||||
- **Web UI**: Beautiful dashboard for monitoring project progress
|
||||
- **n8n Workflows**: Bridges Telegram with LLMs via n8n webhooks
|
||||
- **Comprehensive Testing**: Full test suite with pytest coverage
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────┐ ┌──────────────┐ ┌──────────┐ ┌─────────┐
|
||||
│ Telegram │────▶│ n8n Webhook│────▶│ FastAPI │────▶│ Ollama │
|
||||
└─────────────┘ └──────────────┘ └──────────┘ └─────────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────┐
|
||||
│ Git/Gitea │
|
||||
└──────────────┘
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Docker and Docker Compose
|
||||
- Ollama running locally or on same network
|
||||
- Gitea instance with API token
|
||||
- n8n instance for Telegram webhook
|
||||
|
||||
### Configuration
|
||||
|
||||
Create a `.env` file in the project root:
|
||||
|
||||
```bash
|
||||
$ docker build -t <tagname> -f Containerfile .
|
||||
# Server
|
||||
HOST=0.0.0.0
|
||||
PORT=8000
|
||||
|
||||
# Ollama
|
||||
OLLAMA_URL=http://localhost:11434
|
||||
OLLAMA_MODEL=llama3
|
||||
|
||||
# Gitea
|
||||
GITEA_URL=https://gitea.yourserver.com
|
||||
GITEA_TOKEN=your_gitea_api_token
|
||||
GITEA_OWNER=ai-software-factory
|
||||
# Optional legacy fixed-repository mode. Leave empty to create one repo per project.
|
||||
GITEA_REPO=
|
||||
|
||||
# Database
|
||||
# In production, provide PostgreSQL settings. They take precedence over the SQLite default.
|
||||
# Setting USE_SQLITE=false is still supported if you want to make the choice explicit.
|
||||
POSTGRES_HOST=postgres.yourserver.com
|
||||
POSTGRES_PORT=5432
|
||||
POSTGRES_USER=ai_software_factory
|
||||
POSTGRES_PASSWORD=change-me
|
||||
POSTGRES_DB=ai_software_factory
|
||||
|
||||
# n8n
|
||||
N8N_WEBHOOK_URL=http://n8n.yourserver.com/webhook/telegram
|
||||
|
||||
# Telegram
|
||||
TELEGRAM_BOT_TOKEN=your_telegram_bot_token
|
||||
TELEGRAM_CHAT_ID=your_chat_id
|
||||
```
|
||||
|
||||
### Build and Run
|
||||
|
||||
```bash
|
||||
# Build Docker image
|
||||
DOCKER_API_VERSION=1.43 docker build -t ai-software-factory -f Containerfile .
|
||||
|
||||
# Run with Docker Compose
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
1. **Send a request via Telegram:**
|
||||
|
||||
```
|
||||
Build an internal task management app for our operations team.
|
||||
It should support user authentication, task CRUD, notifications, and reporting.
|
||||
Prefer FastAPI with PostgreSQL and a simple web dashboard.
|
||||
```
|
||||
|
||||
The backend now interprets free-form Telegram text with Ollama before generation.
|
||||
If `TELEGRAM_CHAT_ID` is set, the Telegram-trigger workflow only reacts to messages from that specific chat.
|
||||
|
||||
2. **Monitor progress via Web UI:**
|
||||
|
||||
Open `http://yourserver:8000/` to see the dashboard and `http://yourserver:8000/api` for API metadata
|
||||
|
||||
3. **Review PRs in Gitea:**
|
||||
|
||||
Check your gitea repository for generated PRs
|
||||
|
||||
If you deploy the container with PostgreSQL environment variables set, the service now selects PostgreSQL automatically even though SQLite remains the default for local/test usage.
|
||||
|
||||
## API Endpoints
|
||||
|
||||
| Endpoint | Method | Description |
|
||||
|------|------|-------|
|
||||
| `/` | GET | Dashboard |
|
||||
| `/api` | GET | API information |
|
||||
| `/health` | GET | Health check |
|
||||
| `/generate` | POST | Generate new software |
|
||||
| `/generate/text` | POST | Interpret free-form text and generate software |
|
||||
| `/status/{project_id}` | GET | Get project status |
|
||||
| `/projects` | GET | List all projects |
|
||||
|
||||
## Development
|
||||
|
||||
Read the [CONTRIBUTING.md](CONTRIBUTING.md) file.
|
||||
### Makefile Targets
|
||||
|
||||
```bash
|
||||
make help # Show available targets
|
||||
make setup # Initialize repository
|
||||
make fmt # Format code
|
||||
make lint # Run linters
|
||||
make test # Run tests
|
||||
make test-cov # Run tests with coverage report
|
||||
make release # Create new release tag
|
||||
make build # Build Docker image
|
||||
```
|
||||
|
||||
### Running in Development
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
uvicorn main:app --reload --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
Run the test suite:
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
make test
|
||||
|
||||
# Run tests with coverage report
|
||||
make test-cov
|
||||
|
||||
# Run specific test file
|
||||
pytest tests/test_main.py -v
|
||||
|
||||
# Run tests with verbose output
|
||||
pytest tests/ -v --tb=short
|
||||
```
|
||||
|
||||
### Test Coverage
|
||||
|
||||
View HTML coverage report:
|
||||
|
||||
```bash
|
||||
make test-cov
|
||||
open htmlcov/index.html
|
||||
```
|
||||
|
||||
### Test Structure
|
||||
|
||||
```
|
||||
tests/
|
||||
├── conftest.py # Pytest fixtures and configuration
|
||||
├── test_main.py # Tests for main.py FastAPI app
|
||||
├── test_config.py # Tests for config.py settings
|
||||
├── test_git_manager.py # Tests for git operations
|
||||
├── test_ui_manager.py # Tests for UI rendering
|
||||
├── test_gitea.py # Tests for Gitea API integration
|
||||
├── test_telegram.py # Tests for Telegram integration
|
||||
├── test_orchestrator.py # Tests for agent orchestrator
|
||||
├── test_integration.py # Integration tests for full workflow
|
||||
├── test_config_integration.py # Configuration integration tests
|
||||
├── test_agents_integration.py # Agent integration tests
|
||||
├── test_edge_cases.py # Edge case tests
|
||||
└── test_postgres_integration.py # PostgreSQL integration tests
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
ai-software-factory/
|
||||
├── main.py # FastAPI application
|
||||
├── config.py # Configuration settings
|
||||
├── requirements.txt # Python dependencies
|
||||
├── Containerfile # Docker build file
|
||||
├── README.md # This file
|
||||
├── Makefile # Development utilities
|
||||
├── .env.example # Environment template
|
||||
├── .gitignore # Git ignore rules
|
||||
├── HISTORY.md # Changelog
|
||||
├── pytest.ini # Pytest configuration
|
||||
├── docker-compose.yml # Multi-service orchestration
|
||||
├── .env # Environment variables (not in git)
|
||||
├── tests/ # Test suite
|
||||
│ ├── __init__.py
|
||||
│ ├── conftest.py
|
||||
│ ├── test_*.py # Test files
|
||||
│ └── pytest.ini
|
||||
├── agents/
|
||||
│ ├── __init__.py
|
||||
│ ├── orchestrator.py # Main agent orchestrator
|
||||
│ ├── git_manager.py # Git operations
|
||||
│ ├── ui_manager.py # Web UI management
|
||||
│ ├── telegram.py # Telegram integration
|
||||
│ └── gitea.py # Gitea API client
|
||||
└── n8n/ # n8n webhook configurations
|
||||
```
|
||||
|
||||
## Security Notes
|
||||
|
||||
- Never commit `.env` files to git
|
||||
- Use environment variables for sensitive data
|
||||
- Rotate Gitea API tokens regularly
|
||||
- Restrict Telegram bot permissions
|
||||
- Use HTTPS for Gitea and n8n endpoints
|
||||
|
||||
## License
|
||||
|
||||
MIT License - See LICENSE file for details
|
||||
|
||||
## Contributing
|
||||
|
||||
See [CONTRIBUTING.md](CONTRIBUTING.md) for development guidelines.
|
||||
|
||||
45
ai_software_factory/.env.example
Normal file
45
ai_software_factory/.env.example
Normal file
@@ -0,0 +1,45 @@
|
||||
# AI Software Factory Environment Variables
|
||||
|
||||
# Server
|
||||
HOST=0.0.0.0
|
||||
PORT=8000
|
||||
LOG_LEVEL=INFO
|
||||
|
||||
# Ollama
|
||||
OLLAMA_URL=http://localhost:11434
|
||||
OLLAMA_MODEL=llama3
|
||||
|
||||
# Gitea
|
||||
# Configure Gitea API for your organization
|
||||
# GITEA_URL can be left empty to use GITEA_ORGANIZATION instead of GITEA_OWNER
|
||||
GITEA_URL=https://gitea.yourserver.com
|
||||
GITEA_TOKEN=your_gitea_api_token
|
||||
GITEA_OWNER=your_organization_name
|
||||
GITEA_REPO= (optional legacy fixed repository mode; leave empty to create one repo per project)
|
||||
|
||||
# n8n
|
||||
# n8n webhook for Telegram integration
|
||||
N8N_WEBHOOK_URL=http://n8n.yourserver.com/webhook/telegram
|
||||
# n8n API for automatic webhook configuration
|
||||
N8N_API_URL=http://n8n.yourserver.com
|
||||
N8N_USER=n8n_admin
|
||||
N8N_PASSWORD=your_secure_password
|
||||
|
||||
# Telegram
|
||||
TELEGRAM_BOT_TOKEN=your_telegram_bot_token
|
||||
TELEGRAM_CHAT_ID=your_chat_id
|
||||
|
||||
# PostgreSQL
|
||||
# In production, provide PostgreSQL settings below. They now take precedence over the SQLite default.
|
||||
# You can also set USE_SQLITE=false explicitly if you want the intent to be obvious.
|
||||
POSTGRES_HOST=postgres
|
||||
POSTGRES_PORT=5432
|
||||
POSTGRES_USER=ai_test
|
||||
POSTGRES_PASSWORD=your_secure_password
|
||||
POSTGRES_DB=ai_test
|
||||
|
||||
# Database Connection Pool Settings
|
||||
DB_POOL_SIZE=10
|
||||
DB_MAX_OVERFLOW=20
|
||||
DB_POOL_RECYCLE=3600
|
||||
DB_POOL_TIMEOUT=30
|
||||
88
ai_software_factory/.gitignore
soft
Normal file
88
ai_software_factory/.gitignore
soft
Normal file
@@ -0,0 +1,88 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# IDE
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# OS files
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Project specific
|
||||
.git/
|
||||
.gitignore
|
||||
.env
|
||||
.env.local
|
||||
.env.*.local
|
||||
ai-software-factory/
|
||||
n8n/
|
||||
ui/
|
||||
docs/
|
||||
tests/
|
||||
|
||||
# Temporary files
|
||||
*.tmp
|
||||
*.temp
|
||||
*.log
|
||||
@@ -0,0 +1 @@
|
||||
{"dark_mode":false}
|
||||
@@ -0,0 +1 @@
|
||||
{"dark_mode":false}
|
||||
73
ai_software_factory/CONTRIBUTING.md
Normal file
73
ai_software_factory/CONTRIBUTING.md
Normal file
@@ -0,0 +1,73 @@
|
||||
# Contributing to AI Software Factory
|
||||
|
||||
Thank you for your interest in contributing to the AI Software Factory project!
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Please note that we have a Code of Conduct that all contributors are expected to follow.
|
||||
|
||||
## How to Contribute
|
||||
|
||||
### Reporting Bugs
|
||||
|
||||
Before creating bug reports, please check existing issues as the bug may have already been reported and fixed.
|
||||
|
||||
When reporting a bug, include:
|
||||
|
||||
- A clear description of the bug
|
||||
- Steps to reproduce the bug
|
||||
- Expected behavior
|
||||
- Actual behavior
|
||||
- Screenshots if applicable
|
||||
- Your environment details (OS, Python version, etc.)
|
||||
|
||||
### Suggesting Features
|
||||
|
||||
Feature suggestions are welcome! Please create an issue with:
|
||||
|
||||
- A clear title and description
|
||||
- Use cases for the feature
|
||||
- Any relevant links or references
|
||||
|
||||
### Pull Requests
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a new branch (`git checkout -b feature/feature-name`)
|
||||
3. Make your changes
|
||||
4. Commit your changes (`git commit -am 'Add some feature'`)
|
||||
5. Push to the branch (`git push origin feature/feature-name`)
|
||||
6. Create a new Pull Request
|
||||
|
||||
### Style Guide
|
||||
|
||||
- Follow the existing code style
|
||||
- Add comments for complex logic
|
||||
- Write tests for new features
|
||||
- Update documentation as needed
|
||||
|
||||
## Development Setup
|
||||
|
||||
1. Clone the repository
|
||||
2. Create a virtual environment
|
||||
3. Install dependencies (`pip install -r requirements.txt`)
|
||||
4. Run tests (`make test`)
|
||||
5. Make your changes
|
||||
6. Run tests again to ensure nothing is broken
|
||||
|
||||
## Commit Messages
|
||||
|
||||
Follow the conventional commits format:
|
||||
|
||||
```
|
||||
feat: add new feature
|
||||
fix: fix bug
|
||||
docs: update documentation
|
||||
style: format code
|
||||
refactor: refactor code
|
||||
test: add tests
|
||||
chore: update dependencies
|
||||
```
|
||||
|
||||
## Questions?
|
||||
|
||||
Feel free to open an issue or discussion for any questions.
|
||||
41
ai_software_factory/HISTORY.md
Normal file
41
ai_software_factory/HISTORY.md
Normal file
@@ -0,0 +1,41 @@
|
||||
Changelog
|
||||
=========
|
||||
|
||||
## [0.0.1] - 2026-03-14
|
||||
|
||||
### Added
|
||||
- Initial commit with AI Software Factory service
|
||||
- FastAPI backend for software generation
|
||||
- Telegram integration via n8n webhook
|
||||
- Ollama LLM integration for code generation
|
||||
- Gitea API integration for commits and PRs
|
||||
- Web UI dashboard for monitoring progress
|
||||
- Docker and docker-compose configuration for Unraid
|
||||
- Environment configuration templates
|
||||
- Makefile with development utilities
|
||||
- PostgreSQL integration with connection pooling
|
||||
- Comprehensive audit trail functionality
|
||||
- User action tracking
|
||||
- System log monitoring
|
||||
- Database initialization and migration support
|
||||
- Full test suite with pytest coverage
|
||||
|
||||
### Features
|
||||
- Automated software generation from Telegram requests
|
||||
- Iterative code generation with Ollama
|
||||
- Git commit automation
|
||||
- Pull request creation for user review
|
||||
- Real-time progress monitoring via web UI
|
||||
- n8n workflow integration
|
||||
- Complete audit trail for compliance and debugging
|
||||
- Connection pooling for database efficiency
|
||||
- Health check endpoints
|
||||
- Persistent volumes for git repos and n8n data
|
||||
|
||||
### Infrastructure
|
||||
- Alpine-based Docker image
|
||||
- GPU support for Ollama
|
||||
- Persistent volumes for git repos and n8n data
|
||||
- Health check endpoints
|
||||
- PostgreSQL with connection pooling
|
||||
- Docker Compose for multi-service orchestration
|
||||
28
ai_software_factory/Makefile
Normal file
28
ai_software_factory/Makefile
Normal file
@@ -0,0 +1,28 @@
|
||||
.PHONY: help run-api run-frontend run-tests init-db clean
|
||||
|
||||
help:
|
||||
@echo "Available targets:"
|
||||
@echo " make run-api - Run FastAPI app with NiceGUI frontend (default)"
|
||||
@echo " make run-tests - Run pytest tests"
|
||||
@echo " make init-db - Initialize database"
|
||||
@echo " make clean - Remove container volumes"
|
||||
@echo " make rebuild - Rebuild and run container"
|
||||
|
||||
run-api:
|
||||
@echo "Starting FastAPI app with NiceGUI frontend..."
|
||||
@bash start.sh dev
|
||||
|
||||
run-frontend:
|
||||
@echo "NiceGUI is now integrated with FastAPI - use 'make run-api' to start everything together"
|
||||
|
||||
run-tests:
|
||||
pytest -v
|
||||
|
||||
init-db:
|
||||
@python -c "from main import app; from database import init_db; init_db()"
|
||||
|
||||
clean:
|
||||
@echo "Cleaning up..."
|
||||
@docker-compose down -v
|
||||
|
||||
rebuild: clean run-api
|
||||
215
ai_software_factory/README.md
Normal file
215
ai_software_factory/README.md
Normal file
@@ -0,0 +1,215 @@
|
||||
# AI Software Factory
|
||||
|
||||
Automated software generation service powered by Ollama LLM. This service allows users to specify via Telegram what kind of software they would like, and an agent hosted in Ollama will create it iteratively, testing it while building out the source code and committing to gitea.
|
||||
|
||||
## Features
|
||||
|
||||
- **Telegram Integration**: Receive software requests via Telegram bot
|
||||
- **Ollama LLM**: Uses Ollama-hosted models for code generation
|
||||
- **Git Integration**: Automatically commits code to gitea
|
||||
- **Pull Requests**: Creates PRs for user review before merging
|
||||
- **Web UI**: Beautiful dashboard for monitoring project progress
|
||||
- **n8n Workflows**: Bridges Telegram with LLMs via n8n webhooks
|
||||
- **Comprehensive Testing**: Full test suite with pytest coverage
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────┐ ┌──────────────┐ ┌──────────┐ ┌─────────┐
|
||||
│ Telegram │────▶│ n8n Webhook│────▶│ FastAPI │────▶│ Ollama │
|
||||
└─────────────┘ └──────────────┘ └──────────┘ └─────────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────┐
|
||||
│ Git/Gitea │
|
||||
└──────────────┘
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Docker and Docker Compose
|
||||
- Ollama running locally or on same network
|
||||
- Gitea instance with API token
|
||||
- n8n instance for Telegram webhook
|
||||
|
||||
### Configuration
|
||||
|
||||
Create a `.env` file in the project root:
|
||||
|
||||
```bash
|
||||
# Server
|
||||
HOST=0.0.0.0
|
||||
PORT=8000
|
||||
|
||||
# Ollama
|
||||
OLLAMA_URL=http://localhost:11434
|
||||
OLLAMA_MODEL=llama3
|
||||
|
||||
# Gitea
|
||||
GITEA_URL=https://gitea.yourserver.com
|
||||
GITEA_TOKEN=your_gitea_api_token
|
||||
GITEA_OWNER=ai-software-factory
|
||||
GITEA_REPO=ai-software-factory
|
||||
|
||||
# n8n
|
||||
N8N_WEBHOOK_URL=http://n8n.yourserver.com/webhook/telegram
|
||||
|
||||
# Telegram
|
||||
TELEGRAM_BOT_TOKEN=your_telegram_bot_token
|
||||
TELEGRAM_CHAT_ID=your_chat_id
|
||||
```
|
||||
|
||||
### Build and Run
|
||||
|
||||
```bash
|
||||
# Build Docker image
|
||||
docker build -t ai-software-factory -f Containerfile .
|
||||
|
||||
# Run with Docker Compose
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
1. **Send a request via Telegram:**
|
||||
|
||||
```
|
||||
Name: My Awesome App
|
||||
Description: A web application for managing tasks
|
||||
Features: user authentication, task CRUD, notifications
|
||||
```
|
||||
|
||||
2. **Monitor progress via Web UI:**
|
||||
|
||||
Open `http://yourserver:8000` to see real-time progress
|
||||
|
||||
3. **Review PRs in Gitea:**
|
||||
|
||||
Check your gitea repository for generated PRs
|
||||
|
||||
## API Endpoints
|
||||
|
||||
| Endpoint | Method | Description |
|
||||
|------|------|-------|
|
||||
| `/` | GET | API information |
|
||||
| `/health` | GET | Health check |
|
||||
| `/generate` | POST | Generate new software |
|
||||
| `/status/{project_id}` | GET | Get project status |
|
||||
| `/projects` | GET | List all projects |
|
||||
|
||||
## Development
|
||||
|
||||
### Makefile Targets
|
||||
|
||||
```bash
|
||||
make help # Show available targets
|
||||
make setup # Initialize repository
|
||||
make fmt # Format code
|
||||
make lint # Run linters
|
||||
make test # Run tests
|
||||
make test-cov # Run tests with coverage report
|
||||
make release # Create new release tag
|
||||
make build # Build Docker image
|
||||
```
|
||||
|
||||
### Running in Development
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
uvicorn main:app --reload --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
Run the test suite:
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
make test
|
||||
|
||||
# Run tests with coverage report
|
||||
make test-cov
|
||||
|
||||
# Run specific test file
|
||||
pytest tests/test_main.py -v
|
||||
|
||||
# Run tests with verbose output
|
||||
pytest tests/ -v --tb=short
|
||||
```
|
||||
|
||||
### Test Coverage
|
||||
|
||||
View HTML coverage report:
|
||||
|
||||
```bash
|
||||
make test-cov
|
||||
open htmlcov/index.html
|
||||
```
|
||||
|
||||
### Test Structure
|
||||
|
||||
```
|
||||
tests/
|
||||
├── conftest.py # Pytest fixtures and configuration
|
||||
├── test_main.py # Tests for main.py FastAPI app
|
||||
├── test_config.py # Tests for config.py settings
|
||||
├── test_git_manager.py # Tests for git operations
|
||||
├── test_ui_manager.py # Tests for UI rendering
|
||||
├── test_gitea.py # Tests for Gitea API integration
|
||||
├── test_telegram.py # Tests for Telegram integration
|
||||
├── test_orchestrator.py # Tests for agent orchestrator
|
||||
├── test_integration.py # Integration tests for full workflow
|
||||
├── test_config_integration.py # Configuration integration tests
|
||||
├── test_agents_integration.py # Agent integration tests
|
||||
├── test_edge_cases.py # Edge case tests
|
||||
└── test_postgres_integration.py # PostgreSQL integration tests
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
ai-software-factory/
|
||||
├── main.py # FastAPI application
|
||||
├── config.py # Configuration settings
|
||||
├── requirements.txt # Python dependencies
|
||||
├── Containerfile # Docker build file
|
||||
├── README.md # This file
|
||||
├── Makefile # Development utilities
|
||||
├── .env.example # Environment template
|
||||
├── .gitignore # Git ignore rules
|
||||
├── HISTORY.md # Changelog
|
||||
├── pytest.ini # Pytest configuration
|
||||
├── docker-compose.yml # Multi-service orchestration
|
||||
├── .env # Environment variables (not in git)
|
||||
├── tests/ # Test suite
|
||||
│ ├── __init__.py
|
||||
│ ├── conftest.py
|
||||
│ ├── test_*.py # Test files
|
||||
│ └── pytest.ini
|
||||
├── agents/
|
||||
│ ├── __init__.py
|
||||
│ ├── orchestrator.py # Main agent orchestrator
|
||||
│ ├── git_manager.py # Git operations
|
||||
│ ├── ui_manager.py # Web UI management
|
||||
│ ├── telegram.py # Telegram integration
|
||||
│ └── gitea.py # Gitea API client
|
||||
└── n8n/ # n8n webhook configurations
|
||||
```
|
||||
|
||||
## Security Notes
|
||||
|
||||
- Never commit `.env` files to git
|
||||
- Use environment variables for sensitive data
|
||||
- Rotate Gitea API tokens regularly
|
||||
- Restrict Telegram bot permissions
|
||||
- Use HTTPS for Gitea and n8n endpoints
|
||||
|
||||
## License
|
||||
|
||||
MIT License - See LICENSE file for details
|
||||
|
||||
## Contributing
|
||||
|
||||
See [CONTRIBUTING.md](CONTRIBUTING.md) for development guidelines.
|
||||
1
ai_software_factory/VERSION
Normal file
1
ai_software_factory/VERSION
Normal file
@@ -0,0 +1 @@
|
||||
0.7.0
|
||||
3
ai_software_factory/__init__.py
Normal file
3
ai_software_factory/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""AI Software Factory - Automated software generation service."""
|
||||
|
||||
__version__ = "0.0.1"
|
||||
17
ai_software_factory/agents/__init__.py
Normal file
17
ai_software_factory/agents/__init__.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""AI Software Factory agents."""
|
||||
|
||||
from .orchestrator import AgentOrchestrator
|
||||
from .git_manager import GitManager
|
||||
from .ui_manager import UIManager
|
||||
from .telegram import TelegramHandler
|
||||
from .gitea import GiteaAPI
|
||||
from .database_manager import DatabaseManager
|
||||
|
||||
__all__ = [
|
||||
"AgentOrchestrator",
|
||||
"GitManager",
|
||||
"UIManager",
|
||||
"TelegramHandler",
|
||||
"GiteaAPI",
|
||||
"DatabaseManager"
|
||||
]
|
||||
136
ai_software_factory/agents/change_summary.py
Normal file
136
ai_software_factory/agents/change_summary.py
Normal file
@@ -0,0 +1,136 @@
|
||||
"""Generate concise chat-friendly summaries of software generation results."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
try:
|
||||
from ..config import settings
|
||||
except ImportError:
|
||||
from config import settings
|
||||
|
||||
|
||||
class ChangeSummaryGenerator:
    """Create a readable overview of generated changes for chat responses.

    Tries an Ollama chat completion first; on any failure (network error,
    missing aiohttp, non-2xx response, empty content) it falls back to a
    deterministic, template-based summary so callers always get text back.
    """

    def __init__(self, ollama_url: str | None = None, model: str | None = None):
        # Explicit arguments win over the configured defaults.
        # NOTE(review): attribute casing differs between the two settings
        # lookups (settings.ollama_url vs settings.OLLAMA_MODEL) — confirm
        # both names exist on the settings object.
        self.ollama_url = (ollama_url or settings.ollama_url).rstrip('/')
        self.model = model or settings.OLLAMA_MODEL

    async def summarize(self, context: dict) -> str:
        """Summarize project changes with Ollama, or fall back to a deterministic overview.

        Convenience wrapper around :meth:`summarize_with_trace` that discards
        the trace metadata and returns only the summary text.
        """
        summary, _trace = await self.summarize_with_trace(context)
        return summary

    async def summarize_with_trace(self, context: dict) -> tuple[str, dict]:
        """Summarize project changes with Ollama, or fall back to a deterministic overview.

        Returns:
            A ``(summary, trace)`` tuple. The trace dict records which
            provider produced the text ('ollama' or 'fallback'), the exact
            prompts used, and the raw response for auditing.
        """
        prompt = self._prompt(context)
        system_prompt = (
            'You write concise but informative mobile chat summaries of software delivery work. '
            'Write 3 to 5 sentences. Mention the application goal, main delivered pieces, '
            'technical direction, and what the user should expect next. Avoid markdown bullets.'
        )
        try:
            # Imported lazily so the fallback path still works when aiohttp
            # is not installed; ImportError is caught by the blanket except.
            import aiohttp

            async with aiohttp.ClientSession() as session:
                async with session.post(
                    f'{self.ollama_url}/api/chat',
                    json={
                        'model': self.model,
                        'stream': False,
                        'messages': [
                            {
                                'role': 'system',
                                'content': system_prompt,
                            },
                            {'role': 'user', 'content': prompt},
                        ],
                    },
                ) as resp:
                    # Body is parsed before the status check; a non-JSON
                    # error body raises here and drops us into the fallback.
                    payload = await resp.json()
                    if 200 <= resp.status < 300:
                        content = payload.get('message', {}).get('content', '').strip()
                        # Empty content falls through to the fallback too.
                        if content:
                            return content, {
                                'stage': 'change_summary',
                                'provider': 'ollama',
                                'model': self.model,
                                'system_prompt': system_prompt,
                                'user_prompt': prompt,
                                'assistant_response': content,
                                'raw_response': payload,
                                'fallback_used': False,
                            }
        except Exception:
            # Deliberate best-effort: any failure means "use the fallback",
            # never surface an exception to the chat flow.
            pass

        fallback = self._fallback(context)
        return fallback, {
            'stage': 'change_summary',
            'provider': 'fallback',
            'model': self.model,
            'system_prompt': system_prompt,
            'user_prompt': prompt,
            'assistant_response': fallback,
            'raw_response': {'fallback': 'deterministic'},
            'fallback_used': True,
        }

    def _prompt(self, context: dict) -> str:
        """Render the user prompt sent to the model from the project context.

        Missing context fields are replaced with explicit "No ... recorded"
        placeholders so the model never sees blank sections.
        """
        features = ', '.join(context.get('features') or []) or 'No explicit features recorded'
        tech_stack = ', '.join(context.get('tech_stack') or []) or 'No explicit tech stack recorded'
        changed_files = ', '.join(context.get('changed_files') or []) or 'No files recorded'
        # Only the first four log lines are included to keep the prompt short.
        logs = ' | '.join((context.get('logs') or [])[:4]) or 'No log excerpts'
        return (
            f"Project name: {context.get('name', 'Unknown project')}\n"
            f"Description: {context.get('description', '')}\n"
            f"Features: {features}\n"
            f"Tech stack: {tech_stack}\n"
            f"Changed files: {changed_files}\n"
            f"Repository: {context.get('repository_url') or 'No repository URL'}\n"
            f"Pull request: {context.get('pull_request_url') or 'No pull request URL'}\n"
            f"Pull request state: {context.get('pull_request_state') or 'No pull request state'}\n"
            f"Status message: {context.get('message') or ''}\n"
            f"Log excerpts: {logs}\n"
            "Write a broad but phone-friendly summary of what was done."
        )

    def _fallback(self, context: dict) -> str:
        """Build a deterministic six-sentence summary without calling the LLM.

        Each sentence degrades gracefully when its context field is missing,
        so the result is always complete prose.
        """
        name = context.get('name', 'The project')
        description = context.get('description') or 'a software request'
        changed_files = context.get('changed_files') or []
        features = context.get('features') or []
        tech_stack = context.get('tech_stack') or []
        repo_url = context.get('repository_url')
        repo_status = context.get('repository_status')
        pr_url = context.get('pull_request_url')
        pr_state = context.get('pull_request_state')

        first_sentence = f"{name} was generated from your request for {description}."
        # At most the first three features / stack entries / files are named.
        feature_sentence = (
            f"The delivery focused on {', '.join(features[:3])}."
            if features else
            "The delivery focused on turning the request into an initial runnable application skeleton."
        )
        tech_sentence = (
            f"The generated implementation currently targets {', '.join(tech_stack[:3])}."
            if tech_stack else
            "The implementation was created with the current default stack configured for the factory."
        )
        file_sentence = (
            f"Key artifacts were updated across {len(changed_files)} files, including {', '.join(changed_files[:3])}."
            if changed_files else
            "The service completed the generation flow, but no changed file list was returned."
        )
        if repo_url:
            repo_sentence = f"The resulting project is tracked at {repo_url}."
        elif repo_status in {'pending', 'skipped', 'error'}:
            repo_sentence = "Repository provisioning was not confirmed, so review the Gitea status in the dashboard before assuming a remote repo exists."
        else:
            repo_sentence = "The project is ready for further review in the dashboard."
        if pr_url and pr_state == 'open':
            pr_sentence = f"An open pull request is ready for review at {pr_url}, and later prompts will continue updating that same PR until it is merged."
        elif pr_url:
            pr_sentence = f"The latest pull request is available at {pr_url}."
        else:
            pr_sentence = "No pull request link was recorded for this delivery."
        return ' '.join([first_sentence, feature_sentence, tech_sentence, file_sentence, repo_sentence, pr_sentence])
|
||||
1948
ai_software_factory/agents/database_manager.py
Normal file
1948
ai_software_factory/agents/database_manager.py
Normal file
File diff suppressed because it is too large
Load Diff
179
ai_software_factory/agents/git_manager.py
Normal file
179
ai_software_factory/agents/git_manager.py
Normal file
@@ -0,0 +1,179 @@
|
||||
"""Git manager for project operations."""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
try:
|
||||
from ..config import settings
|
||||
except ImportError:
|
||||
from config import settings
|
||||
|
||||
|
||||
class GitManager:
|
||||
"""Manages git operations for the project."""
|
||||
|
||||
def __init__(self, project_id: str, project_dir: str | None = None):
|
||||
if not project_id:
|
||||
raise ValueError("project_id cannot be empty or None")
|
||||
self.project_id = project_id
|
||||
if project_dir:
|
||||
resolved = Path(project_dir).expanduser().resolve()
|
||||
else:
|
||||
project_path = Path(project_id)
|
||||
if project_path.is_absolute() or len(project_path.parts) > 1:
|
||||
resolved = project_path.expanduser().resolve()
|
||||
else:
|
||||
base_root = settings.projects_root
|
||||
if base_root.name != "test-project":
|
||||
base_root = base_root / "test-project"
|
||||
resolved = (base_root / project_id).resolve()
|
||||
self.project_dir = str(resolved)
|
||||
|
||||
def _run(self, args: list[str], env: dict | None = None, check: bool = True) -> subprocess.CompletedProcess:
|
||||
"""Run a git command in the project directory."""
|
||||
return subprocess.run(
|
||||
args,
|
||||
check=check,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
cwd=self.project_dir,
|
||||
env=env,
|
||||
)
|
||||
|
||||
def has_repo(self) -> bool:
|
||||
"""Return whether the project directory already contains a git repository."""
|
||||
return Path(self.project_dir, '.git').exists()
|
||||
|
||||
def init_repo(self):
|
||||
"""Initialize git repository."""
|
||||
os.makedirs(self.project_dir, exist_ok=True)
|
||||
self._run(["git", "init", "-b", "main"])
|
||||
self._run(["git", "config", "user.name", "AI Software Factory"])
|
||||
self._run(["git", "config", "user.email", "factory@local.invalid"])
|
||||
|
||||
def add_files(self, paths: list[str]):
|
||||
"""Add files to git staging."""
|
||||
self._run(["git", "add"] + paths)
|
||||
|
||||
def checkout_branch(self, branch_name: str, create: bool = False, start_point: str | None = None) -> None:
|
||||
"""Switch to a branch, optionally creating it from a start point."""
|
||||
if create:
|
||||
args = ["git", "checkout", "-B", branch_name]
|
||||
if start_point:
|
||||
args.append(start_point)
|
||||
self._run(args)
|
||||
return
|
||||
self._run(["git", "checkout", branch_name])
|
||||
|
||||
def branch_exists(self, branch_name: str) -> bool:
|
||||
"""Return whether a local branch exists."""
|
||||
result = self._run(["git", "show-ref", "--verify", f"refs/heads/{branch_name}"], check=False)
|
||||
return result.returncode == 0
|
||||
|
||||
def commit(self, message: str) -> str:
|
||||
"""Create a git commit."""
|
||||
self._run(["git", "commit", "-m", message])
|
||||
return self.current_head()
|
||||
|
||||
def create_empty_commit(self, message: str) -> str:
|
||||
"""Create an empty commit."""
|
||||
self._run(["git", "commit", "--allow-empty", "-m", message])
|
||||
return self.current_head()
|
||||
|
||||
def push(self, remote: str = "origin", branch: str = "main"):
|
||||
"""Push changes to remote."""
|
||||
self._run(["git", "push", "-u", remote, branch])
|
||||
|
||||
def ensure_remote(self, remote: str, url: str) -> None:
|
||||
"""Create or update a remote URL."""
|
||||
result = self._run(["git", "remote", "get-url", remote], check=False)
|
||||
if result.returncode == 0:
|
||||
self._run(["git", "remote", "set-url", remote, url])
|
||||
else:
|
||||
self._run(["git", "remote", "add", remote, url])
|
||||
|
||||
def push_with_credentials(
|
||||
self,
|
||||
remote_url: str,
|
||||
username: str,
|
||||
password: str,
|
||||
remote: str = "origin",
|
||||
branch: str = "main",
|
||||
) -> None:
|
||||
"""Push to a remote over HTTPS using an askpass helper."""
|
||||
os.makedirs(self.project_dir, exist_ok=True)
|
||||
self.ensure_remote(remote, remote_url)
|
||||
helper_contents = "#!/bin/sh\ncase \"$1\" in\n *Username*) printf '%s\\n' \"$GIT_ASKPASS_USERNAME\" ;;\n *) printf '%s\\n' \"$GIT_ASKPASS_PASSWORD\" ;;\nesac\n"
|
||||
helper_path: str | None = None
|
||||
try:
|
||||
with tempfile.NamedTemporaryFile('w', delete=False, dir=self.project_dir, prefix='git-askpass-', suffix='.sh') as helper_file:
|
||||
helper_file.write(helper_contents)
|
||||
helper_path = helper_file.name
|
||||
os.chmod(helper_path, 0o700)
|
||||
env = os.environ.copy()
|
||||
env.update(
|
||||
{
|
||||
"GIT_TERMINAL_PROMPT": "0",
|
||||
"GIT_ASKPASS": helper_path,
|
||||
"GIT_ASKPASS_USERNAME": username,
|
||||
"GIT_ASKPASS_PASSWORD": password,
|
||||
}
|
||||
)
|
||||
self._run(["git", "push", "-u", remote, branch], env=env)
|
||||
finally:
|
||||
if helper_path:
|
||||
Path(helper_path).unlink(missing_ok=True)
|
||||
|
||||
def create_branch(self, branch_name: str):
|
||||
"""Create and switch to a new branch."""
|
||||
self._run(["git", "checkout", "-b", branch_name])
|
||||
|
||||
def revert_commit(self, commit_hash: str, no_edit: bool = True) -> str:
|
||||
"""Revert a commit and return the new HEAD."""
|
||||
args = ["git", "revert"]
|
||||
if no_edit:
|
||||
args.append("--no-edit")
|
||||
args.append(commit_hash)
|
||||
self._run(args)
|
||||
return self.current_head()
|
||||
|
||||
def create_pr(
|
||||
self,
|
||||
title: str,
|
||||
body: str,
|
||||
base: str = "main",
|
||||
head: Optional[str] = None
|
||||
) -> dict:
|
||||
"""Create a pull request via gitea API."""
|
||||
# This would integrate with gitea API
|
||||
# For now, return placeholder
|
||||
return {
|
||||
"title": title,
|
||||
"body": body,
|
||||
"base": base,
|
||||
"head": head or f"ai-gen-{self.project_id}"
|
||||
}
|
||||
|
||||
def get_status(self) -> str:
|
||||
"""Get git status."""
|
||||
result = subprocess.run(
|
||||
["git", "status", "--porcelain"],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
cwd=self.project_dir,
|
||||
)
|
||||
return result.stdout.strip()
|
||||
|
||||
def current_head(self) -> str:
|
||||
"""Return the current commit hash."""
|
||||
return self._run(["git", "rev-parse", "HEAD"]).stdout.strip()
|
||||
|
||||
def current_head_or_none(self) -> str | None:
|
||||
"""Return the current commit hash when the repository already has commits."""
|
||||
result = self._run(["git", "rev-parse", "HEAD"], check=False)
|
||||
if result.returncode != 0:
|
||||
return None
|
||||
return result.stdout.strip() or None
|
||||
364
ai_software_factory/agents/gitea.py
Normal file
364
ai_software_factory/agents/gitea.py
Normal file
@@ -0,0 +1,364 @@
|
||||
"""Gitea API integration for repository and pull request operations."""
|
||||
|
||||
import os
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
import json
|
||||
|
||||
|
||||
class GiteaAPI:
|
||||
"""Gitea API client for repository operations."""
|
||||
|
||||
def __init__(self, token: str, base_url: str, owner: str | None = None, repo: str | None = None):
|
||||
self.token = token
|
||||
self.base_url = base_url.rstrip("/")
|
||||
self.owner = owner
|
||||
self.repo = repo
|
||||
self.headers = {
|
||||
"Authorization": f"token {token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
def get_config(self) -> dict:
|
||||
"""Load configuration from environment."""
|
||||
base_url = os.getenv("GITEA_URL", "https://gitea.local")
|
||||
token = os.getenv("GITEA_TOKEN", "")
|
||||
owner = os.getenv("GITEA_OWNER", "ai-test")
|
||||
repo = os.getenv("GITEA_REPO", "")
|
||||
return {
|
||||
"base_url": base_url.rstrip("/"),
|
||||
"token": token,
|
||||
"owner": owner,
|
||||
"repo": repo,
|
||||
"supports_project_repos": not bool(repo),
|
||||
}
|
||||
|
||||
def get_auth_headers(self) -> dict:
|
||||
"""Get authentication headers."""
|
||||
return {
|
||||
"Authorization": f"token {self.token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
def _api_url(self, path: str) -> str:
|
||||
"""Build a Gitea API URL from a relative path."""
|
||||
return f"{self.base_url}/api/v1/{path.lstrip('/')}"
|
||||
|
||||
def build_repo_git_url(self, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||
"""Build the clone URL for a repository."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
if not _owner or not _repo:
|
||||
return None
|
||||
return f"{self.base_url}/{_owner}/{_repo}.git"
|
||||
|
||||
def build_commit_url(self, commit_hash: str, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||
"""Build a browser URL for a commit."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
if not _owner or not _repo or not commit_hash:
|
||||
return None
|
||||
return f"{self.base_url}/{_owner}/{_repo}/commit/{commit_hash}"
|
||||
|
||||
def build_compare_url(self, base_ref: str, head_ref: str, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||
"""Build a browser URL for a compare view."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
if not _owner or not _repo or not base_ref or not head_ref:
|
||||
return None
|
||||
return f"{self.base_url}/{_owner}/{_repo}/compare/{base_ref}...{head_ref}"
|
||||
|
||||
def build_pull_request_url(self, pr_number: int, owner: str | None = None, repo: str | None = None) -> str | None:
|
||||
"""Build a browser URL for a pull request."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
if not _owner or not _repo or not pr_number:
|
||||
return None
|
||||
return f"{self.base_url}/{_owner}/{_repo}/pulls/{pr_number}"
|
||||
|
||||
async def _request(self, method: str, path: str, payload: dict | None = None) -> dict:
|
||||
"""Perform a Gitea API request and normalize the response."""
|
||||
try:
|
||||
import aiohttp
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.request(
|
||||
method,
|
||||
self._api_url(path),
|
||||
headers=self.get_auth_headers(),
|
||||
json=payload,
|
||||
) as resp:
|
||||
if resp.status in (200, 201):
|
||||
return await resp.json()
|
||||
return {"error": await resp.text(), "status_code": resp.status}
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
def _request_sync(self, method: str, path: str, payload: dict | None = None) -> dict:
|
||||
"""Perform a synchronous Gitea API request."""
|
||||
request = urllib.request.Request(
|
||||
self._api_url(path),
|
||||
headers=self.get_auth_headers(),
|
||||
method=method.upper(),
|
||||
)
|
||||
data = None
|
||||
if payload is not None:
|
||||
data = json.dumps(payload).encode('utf-8')
|
||||
request.data = data
|
||||
try:
|
||||
with urllib.request.urlopen(request) as response:
|
||||
body = response.read().decode('utf-8')
|
||||
return json.loads(body) if body else {}
|
||||
except urllib.error.HTTPError as exc:
|
||||
try:
|
||||
body = exc.read().decode('utf-8')
|
||||
except Exception:
|
||||
body = str(exc)
|
||||
return {'error': body, 'status_code': exc.code}
|
||||
except Exception as exc:
|
||||
return {'error': str(exc)}
|
||||
|
||||
def build_project_repo_name(self, project_id: str, project_name: str | None = None) -> str:
|
||||
"""Build a repository name for a generated project."""
|
||||
preferred = (project_name or project_id or "project").strip().lower().replace(" ", "-")
|
||||
sanitized = "".join(ch if ch.isalnum() or ch in {"-", "_"} else "-" for ch in preferred)
|
||||
while "--" in sanitized:
|
||||
sanitized = sanitized.replace("--", "-")
|
||||
return sanitized.strip("-") or project_id
|
||||
|
||||
async def create_repo(
|
||||
self,
|
||||
repo_name: str,
|
||||
owner: str | None = None,
|
||||
description: str | None = None,
|
||||
private: bool = False,
|
||||
auto_init: bool = True,
|
||||
) -> dict:
|
||||
"""Create a repository inside the configured organization."""
|
||||
_owner = owner or self.owner
|
||||
if not _owner:
|
||||
return {"error": "Owner or organization is required"}
|
||||
|
||||
payload = {
|
||||
"name": repo_name,
|
||||
"description": description or f"AI-generated project repository for {repo_name}",
|
||||
"private": private,
|
||||
"auto_init": auto_init,
|
||||
"default_branch": "main",
|
||||
}
|
||||
result = await self._request("POST", f"orgs/{_owner}/repos", payload)
|
||||
if result.get("status_code") == 409:
|
||||
existing = await self.get_repo_info(owner=_owner, repo=repo_name)
|
||||
if not existing.get("error"):
|
||||
existing["status"] = "exists"
|
||||
return existing
|
||||
if not result.get("error"):
|
||||
result.setdefault("status", "created")
|
||||
return result
|
||||
|
||||
async def get_current_user(self) -> dict:
|
||||
"""Get the user associated with the configured token."""
|
||||
return await self._request("GET", "user")
|
||||
|
||||
async def create_branch(self, branch: str, base: str = "main", owner: str | None = None, repo: str | None = None):
|
||||
"""Create a new branch."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return await self._request(
|
||||
"POST",
|
||||
f"repos/{_owner}/{_repo}/branches",
|
||||
{"new_branch_name": branch, "old_ref_name": base},
|
||||
)
|
||||
|
||||
async def create_pull_request(
|
||||
self,
|
||||
title: str,
|
||||
body: str,
|
||||
owner: str,
|
||||
repo: str,
|
||||
base: str = "main",
|
||||
head: str | None = None,
|
||||
) -> dict:
|
||||
"""Create a pull request."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
payload = {
|
||||
"title": title,
|
||||
"body": body,
|
||||
"base": base,
|
||||
"head": head or f"{_owner}-{_repo}-ai-gen-{hash(title) % 10000}",
|
||||
}
|
||||
return await self._request("POST", f"repos/{_owner}/{_repo}/pulls", payload)
|
||||
|
||||
async def list_pull_requests(
|
||||
self,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
state: str = 'open',
|
||||
) -> dict | list:
|
||||
"""List pull requests for a repository."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return await self._request("GET", f"repos/{_owner}/{_repo}/pulls?state={state}")
|
||||
|
||||
def list_pull_requests_sync(
|
||||
self,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
state: str = 'open',
|
||||
) -> dict | list:
|
||||
"""Synchronously list pull requests for a repository."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return self._request_sync("GET", f"repos/{_owner}/{_repo}/pulls?state={state}")
|
||||
|
||||
async def list_repositories(self, owner: str | None = None) -> dict | list:
|
||||
"""List repositories within the configured organization."""
|
||||
_owner = owner or self.owner
|
||||
return await self._request("GET", f"orgs/{_owner}/repos")
|
||||
|
||||
def list_repositories_sync(self, owner: str | None = None) -> dict | list:
|
||||
"""Synchronously list repositories within the configured organization."""
|
||||
_owner = owner or self.owner
|
||||
return self._request_sync("GET", f"orgs/{_owner}/repos")
|
||||
|
||||
async def list_branches(self, owner: str | None = None, repo: str | None = None) -> dict | list:
|
||||
"""List repository branches."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return await self._request("GET", f"repos/{_owner}/{_repo}/branches")
|
||||
|
||||
def list_branches_sync(self, owner: str | None = None, repo: str | None = None) -> dict | list:
|
||||
"""Synchronously list repository branches."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return self._request_sync("GET", f"repos/{_owner}/{_repo}/branches")
|
||||
|
||||
async def list_issues(
|
||||
self,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
state: str = 'open',
|
||||
) -> dict | list:
|
||||
"""List repository issues, excluding pull requests at the consumer layer."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return await self._request("GET", f"repos/{_owner}/{_repo}/issues?state={state}")
|
||||
|
||||
def list_issues_sync(
|
||||
self,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
state: str = 'open',
|
||||
) -> dict | list:
|
||||
"""Synchronously list repository issues."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return self._request_sync("GET", f"repos/{_owner}/{_repo}/issues?state={state}")
|
||||
|
||||
async def get_issue(self, issue_number: int, owner: str | None = None, repo: str | None = None) -> dict:
|
||||
"""Return one repository issue by number."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return await self._request("GET", f"repos/{_owner}/{_repo}/issues/{issue_number}")
|
||||
|
||||
def get_issue_sync(self, issue_number: int, owner: str | None = None, repo: str | None = None) -> dict:
|
||||
"""Synchronously return one repository issue by number."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return self._request_sync("GET", f"repos/{_owner}/{_repo}/issues/{issue_number}")
|
||||
|
||||
async def list_repo_commits(
|
||||
self,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
limit: int = 25,
|
||||
branch: str | None = None,
|
||||
) -> dict | list:
|
||||
"""List recent commits for a repository."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
branch_query = f"&sha={branch}" if branch else ""
|
||||
return await self._request("GET", f"repos/{_owner}/{_repo}/commits?limit={limit}{branch_query}")
|
||||
|
||||
def list_repo_commits_sync(
|
||||
self,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
limit: int = 25,
|
||||
branch: str | None = None,
|
||||
) -> dict | list:
|
||||
"""Synchronously list recent commits for a repository."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
branch_query = f"&sha={branch}" if branch else ""
|
||||
return self._request_sync("GET", f"repos/{_owner}/{_repo}/commits?limit={limit}{branch_query}")
|
||||
|
||||
async def get_commit(
|
||||
self,
|
||||
commit_hash: str,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
) -> dict:
|
||||
"""Return one commit by hash."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return await self._request("GET", f"repos/{_owner}/{_repo}/git/commits/{commit_hash}")
|
||||
|
||||
def get_commit_sync(
|
||||
self,
|
||||
commit_hash: str,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
) -> dict:
|
||||
"""Synchronously return one commit by hash."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return self._request_sync("GET", f"repos/{_owner}/{_repo}/git/commits/{commit_hash}")
|
||||
|
||||
async def get_pull_request(self, pr_number: int, owner: str | None = None, repo: str | None = None) -> dict:
|
||||
"""Return one pull request by number."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return await self._request("GET", f"repos/{_owner}/{_repo}/pulls/{pr_number}")
|
||||
|
||||
def get_pull_request_sync(self, pr_number: int, owner: str | None = None, repo: str | None = None) -> dict:
|
||||
"""Synchronously return one pull request by number."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
return self._request_sync("GET", f"repos/{_owner}/{_repo}/pulls/{pr_number}")
|
||||
|
||||
async def push_commit(
|
||||
self,
|
||||
branch: str,
|
||||
files: list[dict],
|
||||
message: str,
|
||||
owner: str | None = None,
|
||||
repo: str | None = None,
|
||||
) -> dict:
|
||||
"""Push files to a branch.
|
||||
|
||||
In production, this would use gitea's API or git push.
|
||||
For now, this remains simulated.
|
||||
"""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
|
||||
return {
|
||||
"status": "simulated",
|
||||
"branch": branch,
|
||||
"message": message,
|
||||
"files": files,
|
||||
"owner": _owner,
|
||||
"repo": _repo,
|
||||
}
|
||||
|
||||
async def get_repo_info(self, owner: str | None = None, repo: str | None = None) -> dict:
|
||||
"""Get repository information."""
|
||||
_owner = owner or self.owner
|
||||
_repo = repo or self.repo
|
||||
|
||||
if not _repo:
|
||||
return {"error": "Repository name required for org operations"}
|
||||
|
||||
return await self._request("GET", f"repos/{_owner}/{_repo}")
|
||||
551
ai_software_factory/agents/n8n_setup.py
Normal file
551
ai_software_factory/agents/n8n_setup.py
Normal file
@@ -0,0 +1,551 @@
|
||||
"""n8n setup agent for automatic webhook configuration."""
|
||||
|
||||
import json
|
||||
from urllib import error as urllib_error
|
||||
from urllib import request as urllib_request
|
||||
from typing import Optional
|
||||
|
||||
try:
|
||||
from ..config import settings
|
||||
except ImportError:
|
||||
from config import settings
|
||||
|
||||
|
||||
class N8NSetupAgent:
|
||||
"""Automatically configures n8n webhooks and workflows using API token authentication."""
|
||||
|
||||
def __init__(self, api_url: str, webhook_token: str):
|
||||
"""Initialize n8n setup agent.
|
||||
|
||||
Args:
|
||||
api_url: n8n API URL (e.g., http://n8n.yourserver.com)
|
||||
webhook_token: n8n webhook token for API access (more secure than username/password)
|
||||
|
||||
Note: Set the webhook token in n8n via Settings > Credentials > Webhook
|
||||
This token is used for all API requests instead of Basic Auth
|
||||
"""
|
||||
self.api_url = api_url.rstrip("/")
|
||||
self.webhook_token = webhook_token
|
||||
self.session = None
|
||||
|
||||
def _api_path(self, path: str) -> str:
|
||||
"""Build a full n8n API URL for a given endpoint path."""
|
||||
if path.startswith("http://") or path.startswith("https://"):
|
||||
return path
|
||||
trimmed = path.lstrip("/")
|
||||
if trimmed.startswith("api/"):
|
||||
return f"{self.api_url}/{trimmed}"
|
||||
return f"{self.api_url}/api/v1/{trimmed}"
|
||||
|
||||
def get_auth_headers(self) -> dict:
    """Build request headers for the n8n API; the key header is added only when a token is set."""
    token_header = {"X-N8N-API-KEY": self.webhook_token} if self.webhook_token else {}
    return {
        "n8n-no-credentials": "true",
        "Content-Type": "application/json",
        "User-Agent": "AI-Software-Factory",
        **token_header,
    }
|
||||
|
||||
def _extract_message(self, payload: object) -> str:
|
||||
"""Extract a useful message from an n8n response payload."""
|
||||
if isinstance(payload, dict):
|
||||
for key in ("message", "error", "reason", "hint", "text"):
|
||||
value = payload.get(key)
|
||||
if value:
|
||||
return str(value)
|
||||
if payload:
|
||||
return json.dumps(payload)
|
||||
if payload is None:
|
||||
return "No response body"
|
||||
return str(payload)
|
||||
|
||||
def _normalize_success(self, method: str, url: str, status_code: int, payload: object) -> dict:
|
||||
"""Normalize a successful n8n API response."""
|
||||
if isinstance(payload, dict):
|
||||
response = dict(payload)
|
||||
response.setdefault("status_code", status_code)
|
||||
response.setdefault("url", url)
|
||||
response.setdefault("method", method)
|
||||
return response
|
||||
return {"data": payload, "status_code": status_code, "url": url, "method": method}
|
||||
|
||||
def _normalize_error(self, method: str, url: str, status_code: int | None, payload: object) -> dict:
|
||||
"""Normalize an error response with enough detail for diagnostics."""
|
||||
message = self._extract_message(payload)
|
||||
prefix = f"{method} {url}"
|
||||
if status_code is not None:
|
||||
return {
|
||||
"error": f"{prefix} returned {status_code}: {message}",
|
||||
"message": message,
|
||||
"status_code": status_code,
|
||||
"url": url,
|
||||
"method": method,
|
||||
"payload": payload,
|
||||
}
|
||||
return {
|
||||
"error": f"{prefix} failed: {message}",
|
||||
"message": message,
|
||||
"status_code": None,
|
||||
"url": url,
|
||||
"method": method,
|
||||
"payload": payload,
|
||||
}
|
||||
|
||||
def _health_check_row(self, name: str, result: dict) -> dict:
|
||||
"""Convert a raw request result into a UI/API-friendly health check row."""
|
||||
return {
|
||||
"name": name,
|
||||
"ok": not bool(result.get("error")),
|
||||
"url": result.get("url"),
|
||||
"method": result.get("method", "GET"),
|
||||
"status_code": result.get("status_code"),
|
||||
"message": result.get("message") or ("ok" if not result.get("error") else result.get("error")),
|
||||
}
|
||||
|
||||
def _health_suggestion(self, checks: list[dict]) -> str | None:
|
||||
"""Return a suggestion based on failed n8n health checks."""
|
||||
status_codes = {check.get("status_code") for check in checks if check.get("status_code") is not None}
|
||||
if status_codes and status_codes.issubset({404}):
|
||||
return "Verify N8N_API_URL points to the base n8n URL, for example http://host:5678, not /api/v1 or a webhook URL."
|
||||
if status_codes & {401, 403}:
|
||||
return "Check the configured n8n API key or authentication method."
|
||||
return "Verify the n8n URL, API key, and that the n8n API is reachable from this container."
|
||||
|
||||
def _build_health_result(self, healthz_result: dict, workflows_result: dict) -> dict:
    """Build a consolidated health result from the performed checks.

    Preference order: a working /healthz wins; otherwise a reachable
    workflows API still counts as healthy; only when both probes failed
    is an error result (with a remediation suggestion) returned.
    """
    checks = [
        self._health_check_row("healthz", healthz_result),
        self._health_check_row("workflows", workflows_result),
    ]

    # Best case: the dedicated health endpoint answered.
    if not healthz_result.get("error"):
        return {
            "status": "ok",
            "message": "n8n is reachable via /healthz.",
            "api_url": self.api_url,
            "auth_configured": bool(self.webhook_token),
            "checked_via": "healthz",
            "checks": checks,
        }

    # /healthz failed but the API itself works (e.g. proxy setups without /healthz).
    if not workflows_result.get("error"):
        workflows = workflows_result.get("data")
        # "data" may be absent or non-list depending on the response envelope.
        workflow_count = len(workflows) if isinstance(workflows, list) else None
        return {
            "status": "ok",
            "message": "n8n is reachable via the workflows API, but /healthz is unavailable.",
            "api_url": self.api_url,
            "auth_configured": bool(self.webhook_token),
            "checked_via": "workflows",
            "workflow_count": workflow_count,
            "checks": checks,
        }

    # Both probes failed: surface the per-check rows plus a status-code-based hint.
    suggestion = self._health_suggestion(checks)
    return {
        "status": "error",
        "error": "n8n health checks failed",
        "message": "n8n health checks failed.",
        "api_url": self.api_url,
        "auth_configured": bool(self.webhook_token),
        "checked_via": "none",
        "checks": checks,
        "suggestion": suggestion,
    }
|
||||
|
||||
async def _request(self, method: str, path: str, **kwargs) -> dict:
    """Send a request to n8n and normalize the response.

    Args:
        method: HTTP method name.
        path: Endpoint path or absolute URL (resolved via _api_path).
        **kwargs: Extra aiohttp request arguments; an explicit "headers"
            entry overrides the default auth headers.

    Returns:
        A normalized success or error dict; transport failures are
        reported with status_code=None.
    """
    # NOTE(review): aiohttp is imported inside the function rather than at
    # module scope — presumably to keep the module importable where aiohttp
    # is not installed; confirm.
    import aiohttp

    headers = kwargs.pop("headers", None) or self.get_auth_headers()
    url = self._api_path(path)
    try:
        async with aiohttp.ClientSession() as session:
            async with session.request(method, url, headers=headers, **kwargs) as resp:
                # The body must be read while the response context is still open.
                content_type = resp.headers.get("Content-Type", "")
                if "application/json" in content_type:
                    payload = await resp.json()
                else:
                    payload = {"text": await resp.text()}

                if 200 <= resp.status < 300:
                    return self._normalize_success(method, url, resp.status, payload)

                return self._normalize_error(method, url, resp.status, payload)
    except Exception as e:
        # Connection/parse errors carry no HTTP status.
        return self._normalize_error(method, url, None, {"message": str(e)})
|
||||
|
||||
def _request_sync(self, method: str, path: str, **kwargs) -> dict:
    """Send a synchronous request to n8n for dashboard health snapshots.

    Args:
        method: HTTP method name.
        path: Endpoint path or absolute URL (resolved via _api_path).
        **kwargs: Supports "headers" (overrides auth headers), "json"
            (request body), and "timeout" (seconds, default 5).

    Returns:
        A normalized success or error dict; transport failures are
        reported with status_code=None.
    """
    headers = kwargs.pop("headers", None) or self.get_auth_headers()
    payload = kwargs.pop("json", None)
    timeout = kwargs.pop("timeout", 5)
    url = self._api_path(path)
    data = None
    if payload is not None:
        data = json.dumps(payload).encode("utf-8")
    req = urllib_request.Request(url, data=data, headers=headers, method=method)
    try:
        with urllib_request.urlopen(req, timeout=timeout) as resp:
            raw_body = resp.read().decode("utf-8")
            content_type = resp.headers.get("Content-Type", "")
            if "application/json" in content_type and raw_body:
                parsed = json.loads(raw_body)
            elif raw_body:
                # Non-JSON bodies are wrapped so callers always get a dict.
                parsed = {"text": raw_body}
            else:
                parsed = {}
            return self._normalize_success(method, url, resp.status, parsed)
    except urllib_error.HTTPError as exc:
        # HTTPError still carries a body worth surfacing; exc.fp may be None.
        raw_body = exc.read().decode("utf-8") if exc.fp else ""
        try:
            parsed = json.loads(raw_body) if raw_body else {}
        except json.JSONDecodeError:
            parsed = {"text": raw_body} if raw_body else {}
        return self._normalize_error(method, url, exc.code, parsed)
    except Exception as exc:
        return self._normalize_error(method, url, None, {"message": str(exc)})
|
||||
|
||||
async def get_workflow(self, workflow_name: str) -> Optional[dict]:
    """Find a workflow by display name; None when absent.

    An error payload from list_workflows is propagated unchanged so
    callers can distinguish it from "not found".
    """
    listing = await self.list_workflows()
    if isinstance(listing, dict) and listing.get("error"):
        return listing
    matches = (wf for wf in listing if wf.get("name") == workflow_name)
    return next(matches, None)
|
||||
|
||||
async def create_workflow(self, workflow_json: dict) -> dict:
    """Create a workflow from its JSON definition, stripping server-managed fields first."""
    request_body = self._workflow_payload(workflow_json)
    return await self._request("POST", "workflows", json=request_body)
|
||||
|
||||
def _workflow_payload(self, workflow_json: dict) -> dict:
|
||||
"""Return a workflow payload without server-managed read-only fields."""
|
||||
payload = dict(workflow_json)
|
||||
payload.pop("active", None)
|
||||
payload.pop("id", None)
|
||||
payload.pop("createdAt", None)
|
||||
payload.pop("updatedAt", None)
|
||||
payload.pop("versionId", None)
|
||||
return payload
|
||||
|
||||
async def _update_workflow_via_put(self, workflow_id: str, workflow_json: dict) -> dict:
|
||||
"""Fallback update path for n8n instances that only support PUT."""
|
||||
return await self._request("PUT", f"workflows/{workflow_id}", json=self._workflow_payload(workflow_json))
|
||||
|
||||
async def update_workflow(self, workflow_id: str, workflow_json: dict) -> dict:
    """Update an existing workflow, falling back to PUT when PATCH is rejected."""
    patched = await self._request(
        "PATCH", f"workflows/{workflow_id}", json=self._workflow_payload(workflow_json)
    )
    if patched.get("status_code") != 405:
        return patched
    # 405 means this n8n version only supports PUT for workflow updates.
    via_put = await self._update_workflow_via_put(workflow_id, workflow_json)
    if isinstance(via_put, dict) and not via_put.get("error"):
        via_put.setdefault("method", "PUT")
        return via_put
    # PUT also failed: report the original PATCH failure, as before.
    return patched
|
||||
|
||||
async def enable_workflow(self, workflow_id: str) -> dict:
    """Activate a workflow, degrading from POST /activate to PATCH and finally PUT."""

    def _ok(method_name: str) -> dict:
        return {"success": True, "id": workflow_id, "method": method_name}

    activated = await self._request("POST", f"workflows/{workflow_id}/activate")
    if not activated.get("error"):
        return _ok("activate")

    patched = await self._request("PATCH", f"workflows/{workflow_id}", json={"active": True})
    if not patched.get("error"):
        return _ok("patch")
    if patched.get("status_code") != 405:
        return patched

    # PATCH not supported on this n8n version; last resort is a bare PUT.
    put_result = await self._request("PUT", f"workflows/{workflow_id}", json={"active": True})
    if put_result.get("error"):
        return put_result
    return _ok("put")
|
||||
|
||||
async def list_workflows(self) -> list:
    """Return all workflows, unwrapping whichever envelope this n8n version uses.

    Error responses are passed through unchanged (a dict, despite the
    annotation) so callers can inspect them.
    """
    response = await self._request("GET", "workflows")
    if response.get("error"):
        return response
    if isinstance(response, list):
        return response
    if isinstance(response, dict):
        for envelope_key in ("data", "workflows"):
            candidate = response.get(envelope_key)
            if isinstance(candidate, list):
                return candidate
    return []
|
||||
|
||||
def build_telegram_workflow(self, webhook_path: str, backend_url: str, allowed_chat_id: str | None = None) -> dict:
    """Build the Telegram-to-backend workflow definition.

    Args:
        webhook_path: Webhook path segment; slashes are stripped and it
            defaults to "telegram" when empty.
        backend_url: Backend endpoint the workflow POSTs parsed messages to.
        allowed_chat_id: Optional chat id; when set, the embedded JS flags
            messages from any other chat as ignored.

    Returns:
        A workflow dict (nodes + connections) for the n8n workflows API.
    """
    normalized_path = webhook_path.strip().strip("/") or "telegram"
    # Rendered into the JS snippet as a literal: a JSON-quoted id, or '' when unrestricted.
    allowed_chat = json.dumps(str(allowed_chat_id)) if allowed_chat_id else "''"
    return {
        "name": "Telegram to AI Software Factory",
        "settings": {"executionOrder": "v1"},
        "nodes": [
            # 1) Entry point: plain webhook node that receives Telegram update POSTs.
            {
                "id": "webhook-node",
                "name": "Telegram Webhook",
                "type": "n8n-nodes-base.webhook",
                "typeVersion": 2,
                "position": [-520, 120],
                "parameters": {
                    "httpMethod": "POST",
                    "path": normalized_path,
                    "responseMode": "responseNode",
                    "options": {},
                },
            },
            # 2) Code node: extracts text/chat id and applies the allowed-chat filter.
            {
                "id": "parse-node",
                "name": "Prepare Freeform Request",
                "type": "n8n-nodes-base.code",
                "typeVersion": 2,
                "position": [-200, 120],
                "parameters": {
                    "language": "javaScript",
                    # Doubled braces escape f-string interpolation in the JS body.
                    "jsCode": f"const allowedChatId = {allowed_chat};\nconst body = $json.body ?? $json;\nconst message = body.message ?? body;\nconst text = String(message.text ?? '').trim();\nconst chatId = String(message.chat?.id ?? '');\nif (allowedChatId && chatId !== allowedChatId) {{\n return [{{ json: {{ ignored: true, message: `Ignoring message from chat ${{chatId}}`, prompt_text: text, source: 'telegram', chat_id: chatId, chat_type: message.chat?.type ?? null }} }}];\n}}\nreturn [{{ json: {{ prompt_text: text, source: 'telegram', chat_id: chatId, chat_type: message.chat?.type ?? null }} }}];",
                },
            },
            # 3) HTTP node: forwards the prepared payload to the factory backend.
            {
                "id": "api-node",
                "name": "AI Software Factory API",
                "type": "n8n-nodes-base.httpRequest",
                "typeVersion": 4.2,
                "position": [120, 120],
                "parameters": {
                    "method": "POST",
                    "url": backend_url,
                    "sendBody": True,
                    "specifyBody": "json",
                    "jsonBody": "={{ $json }}",
                    "options": {"response": {"response": {"fullResponse": False}}},
                },
            },
            # 4) Respond node: echoes the backend result to the webhook caller.
            {
                "id": "response-node",
                "name": "Respond to Telegram Webhook",
                "type": "n8n-nodes-base.respondToWebhook",
                "typeVersion": 1.2,
                "position": [420, 120],
                "parameters": {
                    "respondWith": "json",
                    "responseBody": "={{ $json }}",
                },
            },
        ],
        # Linear chain: webhook -> parse -> backend API -> respond.
        "connections": {
            "Telegram Webhook": {"main": [[{"node": "Prepare Freeform Request", "type": "main", "index": 0}]]},
            "Prepare Freeform Request": {"main": [[{"node": "AI Software Factory API", "type": "main", "index": 0}]]},
            "AI Software Factory API": {"main": [[{"node": "Respond to Telegram Webhook", "type": "main", "index": 0}]]},
        },
    }
|
||||
|
||||
def build_telegram_trigger_workflow(
    self,
    backend_url: str,
    credential_name: str,
    allowed_chat_id: str | None = None,
) -> dict:
    """Build a production workflow based on n8n's native Telegram Trigger.

    Unlike build_telegram_workflow, this variant uses a Telegram credential
    for both the trigger and the reply node, and silently drops (returns no
    items for) empty messages or messages from disallowed chats.

    Args:
        backend_url: Backend endpoint the workflow POSTs parsed messages to.
        credential_name: Name of the n8n telegramApi credential to attach.
        allowed_chat_id: Optional chat id; other chats are dropped.

    Returns:
        A workflow dict (nodes + connections) for the n8n workflows API.
    """
    # Rendered into the JS snippet as a literal: a JSON-quoted id, or '' when unrestricted.
    allowed_chat = json.dumps(str(allowed_chat_id)) if allowed_chat_id else "''"
    return {
        "name": "Telegram to AI Software Factory",
        "settings": {"executionOrder": "v1"},
        "nodes": [
            # 1) Native Telegram trigger: listens for messages and channel posts.
            {
                "id": "telegram-trigger-node",
                "name": "Telegram Trigger",
                "type": "n8n-nodes-base.telegramTrigger",
                "typeVersion": 1,
                "position": [-520, 120],
                "parameters": {"updates": ["message", "channel_post"]},
                "credentials": {"telegramApi": {"name": credential_name}},
            },
            # 2) Code node: extracts text/chat id; drops empty or disallowed messages.
            {
                "id": "filter-node",
                "name": "Prepare Freeform Request",
                "type": "n8n-nodes-base.code",
                "typeVersion": 2,
                "position": [-180, 120],
                "parameters": {
                    "language": "javaScript",
                    # Doubled braces escape f-string interpolation in the JS body.
                    "jsCode": f"const allowedChatId = {allowed_chat};\nconst message = $json.message ?? $json.channel_post ?? $json;\nconst text = String(message.text ?? '').trim();\nconst chatId = String(message.chat?.id ?? '');\nif (!text) return [];\nif (allowedChatId && chatId !== allowedChatId) return [];\nreturn [{{ json: {{ prompt_text: text, source: 'telegram', chat_id: chatId, chat_type: message.chat?.type ?? null }} }}];",
                },
            },
            # 3) HTTP node: forwards the prepared payload to the factory backend.
            {
                "id": "api-node",
                "name": "AI Software Factory API",
                "type": "n8n-nodes-base.httpRequest",
                "typeVersion": 4.2,
                "position": [120, 120],
                "parameters": {
                    "method": "POST",
                    "url": backend_url,
                    "sendBody": True,
                    "specifyBody": "json",
                    "jsonBody": "={{ $json }}",
                    "options": {"response": {"response": {"fullResponse": False}}},
                },
            },
            # 4) Telegram node: replies in the originating chat with a summary.
            {
                "id": "reply-node",
                "name": "Send Telegram Update",
                "type": "n8n-nodes-base.telegram",
                "typeVersion": 1,
                "position": [420, 120],
                "parameters": {
                    "resource": "message",
                    "operation": "sendMessage",
                    "chatId": "={{ ($('Telegram Trigger').item.json.message ?? $('Telegram Trigger').item.json.channel_post).chat.id }}",
                    "text": "={{ $json.summary_message || $json.data?.summary_message || $json.message || 'Software generation request accepted' }}",
                },
                "credentials": {"telegramApi": {"name": credential_name}},
            },
        ],
        # Linear chain: trigger -> filter -> backend API -> Telegram reply.
        "connections": {
            "Telegram Trigger": {"main": [[{"node": "Prepare Freeform Request", "type": "main", "index": 0}]]},
            "Prepare Freeform Request": {"main": [[{"node": "AI Software Factory API", "type": "main", "index": 0}]]},
            "AI Software Factory API": {"main": [[{"node": "Send Telegram Update", "type": "main", "index": 0}]]},
        },
    }
|
||||
|
||||
async def list_credentials(self) -> list:
    """Return all n8n credentials, tolerating bare-list and enveloped responses.

    Unlike list_workflows, API errors collapse to an empty list.
    """
    response = await self._request("GET", "credentials")
    if response.get("error"):
        return []
    if isinstance(response, list):
        return response
    if isinstance(response, dict):
        for envelope_key in ("data", "credentials"):
            candidate = response.get(envelope_key)
            if isinstance(candidate, list):
                return candidate
    return []
|
||||
|
||||
async def get_credential(self, credential_name: str, credential_type: str = "telegramApi") -> Optional[dict]:
    """Return the first credential matching both name and type, else None."""
    for entry in await self.list_credentials():
        if entry.get("name") == credential_name and entry.get("type") == credential_type:
            return entry
    return None
|
||||
|
||||
async def create_credential(self, name: str, credential_type: str, data: dict) -> dict:
    """Create a new n8n credential of the given type."""
    request_body = {"name": name, "type": credential_type, "data": data}
    return await self._request("POST", "credentials", json=request_body)
|
||||
|
||||
async def ensure_telegram_credential(self, bot_token: str, credential_name: str) -> dict:
    """Ensure a Telegram credential with the given name exists.

    Args:
        bot_token: Telegram bot token to store in the credential.
        credential_name: Name under which the credential is looked up/created.

    Returns:
        The existing or newly created credential, or an error dict.
    """
    existing = await self.get_credential(credential_name)
    if existing:
        return existing
    # Creating a credential with an empty token would "succeed" in n8n but
    # yield a broken Telegram trigger; fail fast with the usual error shape.
    if not bot_token:
        return {"error": "Telegram bot token is not configured; cannot create credential."}
    return await self.create_credential(
        name=credential_name,
        credential_type="telegramApi",
        data={"accessToken": bot_token},
    )
|
||||
|
||||
async def setup_telegram_workflow(self, webhook_path: str) -> dict:
    """Provision the Telegram webhook workflow without forcing an update.

    Args:
        webhook_path: The webhook path to expose (e.g. /webhook/telegram).

    Returns:
        The result dict produced by setup().
    """
    backend = f"{settings.backend_public_url}/generate/text"
    return await self.setup(webhook_path=webhook_path, backend_url=backend, force_update=False)
|
||||
|
||||
async def health_check(self) -> dict:
    """Probe n8n via /healthz and the workflows API, then consolidate the outcome."""
    healthz_probe = await self._request("GET", f"{self.api_url}/healthz")
    workflows_probe = await self._request("GET", "workflows")
    return self._build_health_result(healthz_probe, workflows_probe)
|
||||
|
||||
def health_check_sync(self) -> dict:
    """Blocking variant of health_check, for UI rendering."""
    healthz_probe = self._request_sync("GET", f"{self.api_url}/healthz")
    workflows_probe = self._request_sync("GET", "workflows")
    return self._build_health_result(healthz_probe, workflows_probe)
|
||||
|
||||
async def setup(
    self,
    webhook_path: str = "telegram",
    backend_url: str | None = None,
    force_update: bool = False,
    use_telegram_trigger: bool | None = None,
    telegram_bot_token: str | None = None,
    telegram_credential_name: str | None = None,
) -> dict:
    """Setup n8n webhooks automatically.

    Verifies n8n reachability, chooses between the native Telegram Trigger
    workflow (when a bot token is available) and the plain webhook workflow,
    creates or optionally updates it, and activates it.

    Args:
        webhook_path: Path segment for the plain-webhook variant.
        backend_url: Backend endpoint override; defaults to the configured
            public URL plus /generate/text.
        force_update: When True, an existing workflow is rewritten in place.
        use_telegram_trigger: Explicit trigger-mode override; when None the
            mode is inferred from whether a bot token is configured.
        telegram_bot_token: Bot token override; defaults to settings.
        telegram_credential_name: Credential-name override; defaults to settings.

    Returns:
        A result dict with "status" of "success" or "error" plus details.
    """
    # First, verify n8n is accessible
    health = await self.health_check()
    if health.get("error"):
        return {
            "status": "error",
            "message": health.get("message") or health.get("error"),
            "health": health,
            "checks": health.get("checks", []),
            "suggestion": health.get("suggestion"),
        }

    # Resolve effective configuration, preferring explicit arguments over settings.
    effective_backend_url = backend_url or f"{settings.backend_public_url}/generate/text"
    effective_bot_token = telegram_bot_token or settings.telegram_bot_token
    effective_credential_name = telegram_credential_name or settings.n8n_telegram_credential_name
    trigger_mode = use_telegram_trigger if use_telegram_trigger is not None else bool(effective_bot_token)

    if trigger_mode:
        # Native trigger needs a Telegram credential to exist first.
        credential = await self.ensure_telegram_credential(effective_bot_token, effective_credential_name)
        if credential.get("error"):
            return {"status": "error", "message": credential["error"], "details": credential}
        workflow = self.build_telegram_trigger_workflow(
            backend_url=effective_backend_url,
            credential_name=effective_credential_name,
            allowed_chat_id=settings.telegram_chat_id,
        )
    else:
        workflow = self.build_telegram_workflow(
            webhook_path=webhook_path,
            backend_url=effective_backend_url,
            allowed_chat_id=settings.telegram_chat_id,
        )

    # Look for an existing workflow with the same display name.
    existing = await self.get_workflow(workflow["name"])
    if isinstance(existing, dict) and existing.get("error"):
        return {"status": "error", "message": existing["error"], "details": existing}

    workflow_id = None
    if existing and existing.get("id"):
        workflow_id = str(existing["id"])
        if force_update:
            result = await self.update_workflow(workflow_id, workflow)
        else:
            # Without force_update the existing workflow is reused untouched.
            result = existing
    else:
        result = await self.create_workflow(workflow)
        workflow_id = str(result.get("id", "")) if isinstance(result, dict) else None

    if isinstance(result, dict) and result.get("error"):
        return {"status": "error", "message": result["error"], "details": result}

    # Fall back to the id reported by the create/update response.
    workflow_id = workflow_id or str(result.get("id", ""))
    enable_result = await self.enable_workflow(workflow_id)
    if enable_result.get("error"):
        return {"status": "error", "message": enable_result["error"], "workflow": result, "details": enable_result}

    return {
        "status": "success",
        "message": f'Workflow "{workflow["name"]}" is active',
        "workflow_id": workflow_id,
        "workflow_name": workflow["name"],
        "webhook_path": webhook_path.strip().strip("/") or "telegram",
        "backend_url": effective_backend_url,
        "trigger_mode": "telegram" if trigger_mode else "webhook",
    }
|
||||
697
ai_software_factory/agents/orchestrator.py
Normal file
697
ai_software_factory/agents/orchestrator.py
Normal file
@@ -0,0 +1,697 @@
|
||||
"""Agent orchestrator for software generation."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import difflib
|
||||
import py_compile
|
||||
import re
|
||||
import subprocess
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
|
||||
try:
|
||||
from ..config import settings
|
||||
from .database_manager import DatabaseManager
|
||||
from .git_manager import GitManager
|
||||
from .gitea import GiteaAPI
|
||||
from .ui_manager import UIManager
|
||||
except ImportError:
|
||||
from config import settings
|
||||
from agents.database_manager import DatabaseManager
|
||||
from agents.git_manager import GitManager
|
||||
from agents.gitea import GiteaAPI
|
||||
from agents.ui_manager import UIManager
|
||||
|
||||
|
||||
class AgentOrchestrator:
|
||||
"""Orchestrates the software generation process with full audit trail."""
|
||||
|
||||
def __init__(
    self,
    project_id: str,
    project_name: str,
    description: str,
    features: list,
    tech_stack: list,
    db=None,
    prompt_text: str | None = None,
    prompt_actor: str = "api",
    existing_history=None,
    prompt_source_context: dict | None = None,
    prompt_routing: dict | None = None,
    related_issue_hint: dict | None = None,
):
    """Initialize the orchestrator and its audit/persistence wiring.

    Args:
        project_id: Stable identifier for the generated project.
        project_name: Human-readable project name.
        description: Free-form project description.
        features: Requested feature list.
        tech_stack: Requested technology list.
        db: Optional database session; enables audit logging when given.
        prompt_text: Raw prompt that triggered this run, if any.
        prompt_actor: Origin of the prompt (defaults to "api").
        existing_history: Prior history row to resume instead of starting new.
        prompt_source_context: Extra context about where the prompt came from.
        prompt_routing: Routing metadata recorded with the prompt audit.
        related_issue_hint: Optional issue dict overriding prompt-text parsing.
    """
    # Basic request state and progress-tracking fields.
    self.project_id = project_id
    self.project_name = project_name
    self.description = description
    self.features = features
    self.tech_stack = tech_stack
    self.status = "initialized"
    self.progress = 0
    self.current_step = None
    self.message = ""
    self.logs = []
    self.ui_data = {}
    self.db = db
    self.prompt_text = prompt_text
    self.prompt_actor = prompt_actor
    self.prompt_source_context = prompt_source_context or {}
    self.prompt_routing = prompt_routing or {}
    self.existing_history = existing_history
    self.changed_files: list[str] = []
    # NOTE(review): settings is accessed with UPPER_CASE names here but
    # lower_case names below (gitea_repo/gitea_owner) — confirm both exist.
    self.gitea_api = GiteaAPI(
        token=settings.GITEA_TOKEN,
        base_url=settings.GITEA_URL,
        owner=settings.GITEA_OWNER,
        repo=settings.GITEA_REPO or ""
    )
    self.project_root = settings.projects_root / project_id
    self.prompt_audit = None
    self.repo_name = settings.gitea_repo or self.gitea_api.build_project_repo_name(project_id, project_name)
    self.repo_owner = settings.gitea_owner
    self.repo_url = None
    self.branch_name = self._build_pr_branch_name(project_id)
    self.active_pull_request = None
    self._gitea_username: str | None = None
    # An explicit issue hint wins over parsing the prompt text for "#N".
    hinted_issue_number = (related_issue_hint or {}).get('number') if related_issue_hint else None
    self.related_issue_number = hinted_issue_number if hinted_issue_number is not None else self._extract_issue_number(prompt_text)
    self.related_issue: dict | None = DatabaseManager._normalize_issue(related_issue_hint)

    # Initialize agents
    self.git_manager = GitManager(project_id, project_dir=str(self.project_root))
    self.ui_manager = UIManager(project_id)

    # Initialize database manager if db session provided
    self.db_manager = None
    self.history = None
    if db:
        self.db_manager = DatabaseManager(db)
        if existing_history is not None:
            # Resuming: adopt identity fields from the stored history row.
            self.history = existing_history
            self.project_id = existing_history.project_id
            self.project_name = existing_history.project_name or project_name
            self.description = existing_history.description or description
        else:
            self.history = self.db_manager.log_project_start(
                project_id=project_id,
                project_name=project_name,
                description=description
            )
        # NOTE(review): DatabaseManager is constructed a second time although
        # self.db_manager was assigned above — looks redundant; confirm.
        self.db_manager = DatabaseManager(db)
        self.active_pull_request = self.db_manager.get_open_pull_request(project_id=self.project_id)
        if existing_history is not None and self.history is not None:
            # Restore repository identity from the last persisted UI snapshot.
            latest_ui = self.db_manager._get_latest_ui_snapshot_data(self.history.id)
            repository = latest_ui.get('repository') if isinstance(latest_ui, dict) else None
            if isinstance(repository, dict) and repository:
                self.repo_owner = repository.get('owner') or self.repo_owner
                self.repo_name = repository.get('name') or self.repo_name
                self.repo_url = repository.get('url') or self.repo_url
        if self.prompt_text:
            # Record the triggering prompt for the audit trail.
            self.prompt_audit = self.db_manager.log_prompt_submission(
                history_id=self.history.id,
                project_id=self.project_id,
                prompt_text=self.prompt_text,
                features=self.features,
                tech_stack=self.tech_stack,
                actor_name=self.prompt_actor,
                related_issue={'number': self.related_issue_number} if self.related_issue_number is not None else None,
                source_context=self.prompt_source_context,
                routing=self.prompt_routing,
            )

    # Seed the UI snapshot with project and repository metadata.
    self.ui_manager.ui_data["project_root"] = str(self.project_root)
    self.ui_manager.ui_data["features"] = list(self.features)
    self.ui_manager.ui_data["tech_stack"] = list(self.tech_stack)
    self.ui_manager.ui_data["repository"] = {
        "owner": self.repo_owner,
        "name": self.repo_name,
        "mode": "project" if settings.use_project_repositories else "shared",
        "status": "pending" if settings.use_project_repositories else "shared",
        "provider": "gitea",
    }
    if self.related_issue:
        self.ui_manager.ui_data["related_issue"] = self.related_issue
    if self.active_pull_request:
        self.ui_manager.ui_data["pull_request"] = self.active_pull_request
|
||||
|
||||
def _build_pr_branch_name(self, project_id: str) -> str:
|
||||
"""Build a stable branch name used until the PR is merged."""
|
||||
return f"ai/{project_id}"
|
||||
|
||||
def _extract_issue_number(self, prompt_text: str | None) -> int | None:
|
||||
"""Extract an issue reference from prompt text."""
|
||||
if not prompt_text:
|
||||
return None
|
||||
match = re.search(r'(?:#|issue\s+)(\d+)', prompt_text, flags=re.IGNORECASE)
|
||||
return int(match.group(1)) if match else None
|
||||
|
||||
def _build_repo_url(self, owner: str | None, repo: str | None) -> str | None:
    """Compose the browser URL for a repository, or None when any part is missing."""
    # Only consult settings.gitea_url once owner and repo are known-present,
    # mirroring the original short-circuit order.
    base = settings.gitea_url if owner and repo else None
    if not base:
        return None
    return "/".join([base.rstrip('/'), owner, repo])
|
||||
|
||||
def _log_generation_plan_trace(self) -> None:
    """Persist the current generation plan as an inspectable trace.

    Does nothing unless the DB manager, history row, and prompt audit row
    all exist — i.e. auditing was fully initialized for this run.
    """
    if not self.db_manager or not self.history or not self.prompt_audit:
        return
    # The plan is simply the set of template files that will be written.
    planned_files = list(self._template_files().keys())
    self.db_manager.log_llm_trace(
        project_id=self.project_id,
        history_id=self.history.id,
        prompt_id=self.prompt_audit.id,
        stage='generation_plan',
        provider='factory-planner',
        model='template-generator',
        system_prompt='Plan the generated project structure from the structured request and repository state.',
        user_prompt=self.prompt_text or self.description,
        # Human-readable summary; issue suffix appended only when one is linked.
        assistant_response=(
            f"Planned files: {', '.join(planned_files)}. "
            f"Target branch: {self.branch_name}. "
            f"Repository mode: {self.ui_manager.ui_data.get('repository', {}).get('mode', 'unknown')}."
            + (
                f" Linked issue: #{self.related_issue.get('number')} {self.related_issue.get('title')}."
                if self.related_issue else ''
            )
        ),
        # Structured mirror of the same information for programmatic consumers.
        raw_response={
            'planned_files': planned_files,
            'features': list(self.features),
            'tech_stack': list(self.tech_stack),
            'branch': self.branch_name,
            'repository': self.ui_manager.ui_data.get('repository', {}),
            'related_issue': self.related_issue,
        },
        fallback_used=False,
    )
|
||||
|
||||
async def _sync_issue_context(self) -> None:
    """Sync repository issues and resolve a linked issue from the prompt when present.

    Silently returns when auditing or Gitea configuration is incomplete, or
    when the referenced issue cannot be fetched / is actually a pull request.
    """
    if not self.db_manager or not self.history:
        return
    # Prefer repository identity from the UI snapshot, fall back to instance state.
    repository = self.ui_manager.ui_data.get('repository') or {}
    owner = repository.get('owner') or self.repo_owner
    repo_name = repository.get('name') or self.repo_name
    if not owner or not repo_name or not settings.gitea_url or not settings.gitea_token:
        return
    issues_result = self.db_manager.sync_repository_issues(project_id=self.project_id, gitea_api=self.gitea_api, state='open')
    self.ui_manager.ui_data['issues'] = issues_result.get('issues', []) if issues_result.get('status') == 'success' else []
    if self.related_issue_number is None:
        return
    issue_payload = await self.gitea_api.get_issue(issue_number=self.related_issue_number, owner=owner, repo=repo_name)
    if isinstance(issue_payload, dict) and issue_payload.get('error'):
        return
    # Gitea returns PRs through the issue API too; those carry a 'pull_request' key.
    if issue_payload.get('pull_request'):
        return
    self.related_issue = DatabaseManager._normalize_issue(issue_payload)
    self.ui_manager.ui_data['related_issue'] = self.related_issue
    if self.prompt_audit:
        self.db_manager.attach_issue_to_prompt(self.prompt_audit.id, self.related_issue)
|
||||
|
||||
async def _ensure_remote_repository(self) -> None:
    """Provision (or locate) the remote Gitea repository and record the outcome in UI data.

    Three top-level outcomes:
    - shared mode: no per-project repo is provisioned, a predicted URL may be set;
    - skipped: Gitea owner/URL/token configuration is incomplete;
    - provisioned: create_repo is attempted, with one rename-and-retry on a
      name collision, and the API response is mirrored into the UI snapshot.
    """
    if not settings.use_project_repositories:
        self.ui_manager.ui_data["repository"]["status"] = "shared"
        if settings.gitea_repo:
            predicted_url = self._build_repo_url(self.repo_owner, self.repo_name)
            if predicted_url:
                self.repo_url = predicted_url
                self.ui_manager.ui_data["repository"]["url"] = predicted_url
        self.ui_manager.ui_data["repository"]["api_response"] = {
            "status": "shared",
            "detail": "Using the configured shared repository instead of provisioning a per-project repo.",
        }
        return
    if not self.repo_owner or not settings.gitea_token or not settings.gitea_url:
        self.ui_manager.ui_data["repository"]["status"] = "skipped"
        self.ui_manager.ui_data["repository"]["reason"] = "Missing Gitea owner, URL, or token configuration"
        self.ui_manager.ui_data["repository"]["api_response"] = {
            "status": "skipped",
            "detail": "Missing Gitea owner, URL, or token configuration",
        }
        return

    repo_name = self.repo_name
    result = await self.gitea_api.create_repo(
        repo_name=repo_name,
        owner=self.repo_owner,
        description=f"AI-generated project for {self.project_name}",
        auto_init=False,
    )
    # Name collision on the default derived name: retry once with a suffix
    # taken from the tail of the project id.
    if result.get("status") == "exists" and repo_name == self.gitea_api.build_project_repo_name(self.project_id, self.project_name):
        repo_name = f"{repo_name}-{self.project_id.split('-')[-1]}"
        result = await self.gitea_api.create_repo(
            repo_name=repo_name,
            owner=self.repo_owner,
            description=f"AI-generated project for {self.project_name}",
            auto_init=False,
        )
    self.repo_name = repo_name
    self.ui_manager.ui_data["repository"]["name"] = repo_name
    if self.db_manager:
        self.db_manager.log_system_event(
            component="gitea",
            level="ERROR" if result.get("error") else "INFO",
            message=(
                f"Repository setup failed for {self.repo_owner}/{self.repo_name}: {result.get('error')}"
                if result.get("error")
                else f"Prepared repository {self.repo_owner}/{self.repo_name}"
            ),
        )
    repo_status = result.get("status", "error" if result.get("error") else "ready")
    self.ui_manager.ui_data["repository"]["status"] = repo_status
    # Mirror the API response into the UI snapshot, dropping the 'private' flag.
    self.ui_manager.ui_data["repository"]["api_response"] = {
        key: value
        for key, value in result.items()
        if key not in {"private"}
    }
    if result.get("status_code") is not None:
        self.ui_manager.ui_data["repository"]["api_status_code"] = result.get("status_code")
    if result.get("error"):
        self.ui_manager.ui_data["repository"]["reason"] = result.get("error")
        self.ui_manager.ui_data["repository"].pop("url", None)
    elif result.get("html_url"):
        # Freshly created repo: take the URLs from the API response.
        self.repo_url = result["html_url"]
        self.ui_manager.ui_data["repository"]["url"] = self.repo_url
        clone_url = result.get("clone_url") or self.gitea_api.build_repo_git_url(self.repo_owner, self.repo_name)
        if clone_url:
            self.ui_manager.ui_data["repository"]["clone_url"] = clone_url
        self.ui_manager.ui_data["repository"].pop("reason", None)
    elif repo_status == "exists":
        # Pre-existing repo: predict its URLs from configuration.
        predicted_url = self._build_repo_url(self.repo_owner, self.repo_name)
        if predicted_url:
            self.repo_url = predicted_url
            self.ui_manager.ui_data["repository"]["url"] = predicted_url
        clone_url = result.get("clone_url") or self.gitea_api.build_repo_git_url(self.repo_owner, self.repo_name)
        if clone_url:
            self.ui_manager.ui_data["repository"]["clone_url"] = clone_url
    else:
        self.ui_manager.ui_data["repository"].pop("url", None)
|
||||
|
||||
async def _resolve_gitea_username(self) -> str:
|
||||
"""Resolve and cache the Gitea login used for authenticated git operations."""
|
||||
if self._gitea_username:
|
||||
return self._gitea_username
|
||||
user_info = await self.gitea_api.get_current_user()
|
||||
if user_info.get('error') or not user_info.get('login'):
|
||||
raise RuntimeError(f"Unable to resolve Gitea user for push: {user_info.get('error', 'missing login')}")
|
||||
self._gitea_username = user_info['login']
|
||||
return self._gitea_username
|
||||
|
||||
async def _push_branch(self, branch: str) -> dict | None:
    """Push a branch to the configured project repository when available.

    Returns a small status record on success, or None when any precondition
    (project mode, repo readiness, credentials, clone URL) is missing.
    May propagate RuntimeError from username resolution or git errors from
    the push itself; callers catch those.
    """
    repository = self.ui_manager.ui_data.get('repository') or {}
    if repository.get('mode') != 'project':
        return None
    if repository.get('status') not in {'created', 'exists', 'ready'}:
        return None
    if not settings.gitea_token or not self.repo_owner or not self.repo_name:
        return None

    clone_url = repository.get('clone_url') or self.gitea_api.build_repo_git_url(self.repo_owner, self.repo_name)
    if not clone_url:
        return None
    username = await self._resolve_gitea_username()
    # The token doubles as the password for HTTP(S) push auth.
    self.git_manager.push_with_credentials(
        remote_url=clone_url,
        username=username,
        password=settings.gitea_token,
        remote='origin',
        branch=branch,
    )
    return {'status': 'pushed', 'remote': clone_url, 'branch': branch}
|
||||
|
||||
async def _prepare_git_workspace(self) -> None:
    """Initialize the local repo and ensure the PR branch exists before writing files.

    A fresh repo gets an empty bootstrap commit, and we best-effort push
    'main' so the remote has a base branch; failures there are recorded
    but never abort the run.
    """
    if not self.git_manager.has_repo():
        self.git_manager.init_repo()

    if not self.git_manager.current_head_or_none():
        self.git_manager.create_empty_commit('Initialize project repository')
        try:
            await self._push_branch('main')
        except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as exc:
            # Push is optional at this stage; surface the error in UI data only.
            self.ui_manager.ui_data.setdefault('git', {})['remote_error'] = str(exc)
            self._append_log(f'Initial main push skipped: {exc}')

    # Reuse the PR branch across deliveries; create it from 'main' on first use.
    if self.git_manager.branch_exists(self.branch_name):
        self.git_manager.checkout_branch(self.branch_name)
    else:
        self.git_manager.checkout_branch(self.branch_name, create=True, start_point='main')
    self.ui_manager.ui_data.setdefault('git', {})['active_branch'] = self.branch_name
|
||||
|
||||
async def _ensure_pull_request(self) -> dict | None:
    """Create the project pull request on first delivery and reuse it later.

    Returns the active PR record, or None when no project repository is
    ready. Raises RuntimeError when the Gitea create call reports an error.
    """
    # Fast path: a PR is already tracked for this project.
    if self.active_pull_request:
        self.ui_manager.ui_data['pull_request'] = self.active_pull_request
        return self.active_pull_request
    repository = self.ui_manager.ui_data.get('repository') or {}
    if repository.get('mode') != 'project' or repository.get('status') not in {'created', 'exists', 'ready'}:
        return None

    title = f"AI delivery for {self.project_name}"
    body = (
        f"Automated software factory changes for {self.project_name}.\n\n"
        f"Prompt: {self.prompt_text or self.description}\n\n"
        f"Branch: {self.branch_name}"
    )
    result = await self.gitea_api.create_pull_request(
        title=title,
        body=body,
        owner=self.repo_owner,
        repo=self.repo_name,
        base='main',
        head=self.branch_name,
    )
    if result.get('error'):
        raise RuntimeError(f"Unable to create pull request: {result.get('error')}")

    pr_number = result.get('number') or result.get('id') or 0
    # Normalize the Gitea response into the flat record the dashboard expects,
    # tolerating missing/odd-shaped nested fields.
    pr_data = {
        'pr_number': pr_number,
        'title': result.get('title', title),
        'body': result.get('body', body),
        'state': result.get('state', 'open'),
        'base': result.get('base', {}).get('ref', 'main') if isinstance(result.get('base'), dict) else 'main',
        'user': result.get('user', {}).get('login', 'system') if isinstance(result.get('user'), dict) else 'system',
        'pr_url': result.get('html_url') or self.gitea_api.build_pull_request_url(pr_number, self.repo_owner, self.repo_name),
        'merged': bool(result.get('merged')),
        'pr_state': result.get('state', 'open'),
    }
    if self.db_manager and self.history:
        self.db_manager.save_pr_data(self.history.id, pr_data)
    # Prefer the DB's canonical open-PR record when auditing is available.
    self.active_pull_request = self.db_manager.get_open_pull_request(project_id=self.project_id) if self.db_manager else pr_data
    self.ui_manager.ui_data['pull_request'] = self.active_pull_request or pr_data
    return self.active_pull_request or pr_data
|
||||
|
||||
async def _push_remote_commit(self, commit_hash: str, commit_message: str, changed_files: list[str], base_commit: str | None) -> dict | None:
    """Push the local commit to the provisioned Gitea repository and build browser links.

    Args:
        commit_hash: hash of the commit just created locally.
        commit_message: its message (currently unused here beyond the signature).
        changed_files: files included in the commit, echoed into the record.
        base_commit: previous HEAD, used to build a compare URL when present.

    Returns:
        A record describing the push (including the PR), or None when the
        repository is not in project mode / not ready or the push was skipped.
    """
    repository = self.ui_manager.ui_data.get("repository") or {}
    if repository.get("mode") != "project":
        return None
    if repository.get("status") not in {"created", "exists", "ready"}:
        return None
    push_result = await self._push_branch(self.branch_name)
    if push_result is None:
        return None
    pull_request = await self._ensure_pull_request()
    commit_url = self.gitea_api.build_commit_url(commit_hash, owner=self.repo_owner, repo=self.repo_name)
    compare_url = self.gitea_api.build_compare_url(base_commit, commit_hash, owner=self.repo_owner, repo=self.repo_name) if base_commit else None
    remote_record = {
        "status": "pushed",
        "remote": push_result.get('remote'),
        "branch": self.branch_name,
        "commit_url": commit_url,
        "compare_url": compare_url,
        "changed_files": changed_files,
        "pull_request": pull_request,
    }
    self.ui_manager.ui_data.setdefault("git", {})["remote_push"] = remote_record
    repository["last_commit_url"] = commit_url
    if compare_url:
        repository["last_compare_url"] = compare_url
    self._append_log(f"Pushed generated commit to {self.repo_owner}/{self.repo_name}.")
    return remote_record
|
||||
|
||||
def _build_diff_text(self, relative_path: str, previous_content: str, new_content: str) -> str:
|
||||
"""Build a unified diff for display in the dashboard."""
|
||||
previous_lines = previous_content.splitlines(keepends=True)
|
||||
new_lines = new_content.splitlines(keepends=True)
|
||||
diff = difflib.unified_diff(
|
||||
previous_lines,
|
||||
new_lines,
|
||||
fromfile=f"a/{relative_path}",
|
||||
tofile=f"b/{relative_path}",
|
||||
)
|
||||
return "".join(diff)
|
||||
|
||||
def _append_log(self, message: str) -> None:
|
||||
timestamped = f"[{datetime.utcnow().isoformat()}] {message}"
|
||||
self.logs.append(timestamped)
|
||||
if self.db_manager and self.history:
|
||||
self.db_manager._log_action(self.history.id, "INFO", message)
|
||||
|
||||
def _update_progress(self, progress: int, step: str, message: str) -> None:
|
||||
self.progress = progress
|
||||
self.current_step = step
|
||||
self.message = message
|
||||
self.ui_manager.update_status(self.status, progress, message)
|
||||
if self.db_manager and self.history:
|
||||
self.db_manager.log_progress_update(
|
||||
history_id=self.history.id,
|
||||
progress=progress,
|
||||
step=step,
|
||||
message=message,
|
||||
)
|
||||
|
||||
def _write_file(self, relative_path: str, content: str) -> None:
    """Write a generated file under the project root and audit the change.

    Captures the previous content (when the file exists) so a unified diff
    can be stored, then records the path in changed_files for the commit.
    """
    target = self.project_root / relative_path
    target.parent.mkdir(parents=True, exist_ok=True)
    # Classify before writing: overwriting an existing file is an UPDATE.
    change_type = "UPDATE" if target.exists() else "CREATE"
    previous_content = target.read_text(encoding="utf-8") if target.exists() else ""
    diff_text = self._build_diff_text(relative_path, previous_content, content)
    target.write_text(content, encoding="utf-8")
    self.changed_files.append(relative_path)
    if self.db_manager and self.history:
        self.db_manager.log_code_change(
            project_id=self.project_id,
            change_type=change_type,
            file_path=relative_path,
            actor="orchestrator",
            actor_type="agent",
            details=f"{change_type.title()}d generated artifact {relative_path}",
            history_id=self.history.id,
            prompt_id=self.prompt_audit.id if self.prompt_audit else None,
            diff_summary=f"Wrote {len(content.splitlines())} lines to {relative_path}",
            diff_text=diff_text,
        )
|
||||
|
||||
def _template_files(self) -> dict[str, str]:
|
||||
feature_section = "\n".join(f"- {feature}" for feature in self.features) or "- None specified"
|
||||
tech_section = "\n".join(f"- {tech}" for tech in self.tech_stack) or "- Python"
|
||||
return {
|
||||
".gitignore": "__pycache__/\n*.pyc\n.venv/\n.pytest_cache/\n.mypy_cache/\n",
|
||||
"README.md": (
|
||||
f"# {self.project_name}\n\n"
|
||||
f"{self.description}\n\n"
|
||||
"## Features\n"
|
||||
f"{feature_section}\n\n"
|
||||
"## Tech Stack\n"
|
||||
f"{tech_section}\n"
|
||||
),
|
||||
"requirements.txt": "fastapi\nuvicorn\npytest\n",
|
||||
"main.py": (
|
||||
"from fastapi import FastAPI\n\n"
|
||||
"app = FastAPI(title=\"Generated App\")\n\n"
|
||||
"@app.get('/')\n"
|
||||
"def read_root():\n"
|
||||
f" return {{'name': '{self.project_name}', 'status': 'generated', 'features': {self.features!r}}}\n"
|
||||
),
|
||||
"tests/test_app.py": (
|
||||
"from main import read_root\n\n"
|
||||
"def test_read_root():\n"
|
||||
f" assert read_root()['name'] == '{self.project_name}'\n"
|
||||
),
|
||||
}
|
||||
|
||||
async def run(self) -> dict:
    """Run the software generation process with full audit logging.

    Executes the pipeline (init, repo/issue/git setup, structure, code,
    validation, commit) and returns a status dict. Any exception flips the
    run to an 'error' result instead of propagating.
    """
    try:
        # Step 1: Initialize project
        self.status = "running"
        self._update_progress(5, "initializing", "Setting up project structure...")
        self._append_log("Initializing project.")

        await self._ensure_remote_repository()
        await self._sync_issue_context()
        await self._prepare_git_workspace()
        self._log_generation_plan_trace()

        # Step 2: Create project structure (skip git operations)
        self._update_progress(20, "project-structure", "Creating project files...")
        await self._create_project_structure()

        # Step 3: Generate initial code
        self._update_progress(55, "code-generation", "Generating project entrypoint and tests...")
        await self._generate_code()

        # Step 4: Test the code
        self._update_progress(80, "validation", "Validating generated code...")
        await self._run_tests()

        # Step 5: Commit generated artifacts locally for traceability
        self._update_progress(90, "git", "Recording generated changes in git...")
        await self._commit_to_git()

        # Step 6: Complete
        self.status = "completed"
        self._update_progress(100, "completed", "Software generation complete!")
        self._append_log("Software generation complete!")
        # dict.fromkeys preserves order while de-duplicating.
        self.ui_manager.ui_data["changed_files"] = list(dict.fromkeys(self.changed_files))

        # Log completion to database if available
        if self.db_manager and self.history:
            self.db_manager.save_ui_snapshot(self.history.id, self.ui_manager.get_ui_data())
            self.db_manager.log_project_complete(
                history_id=self.history.id,
                message="Software generation complete!"
            )

        return {
            "status": "completed",
            "progress": self.progress,
            "message": self.message,
            "current_step": self.current_step,
            "logs": self.logs,
            "ui_data": self.ui_manager.ui_data,
            "history_id": self.history.id if self.history else None,
            "project_root": str(self.project_root),
            "changed_files": list(dict.fromkeys(self.changed_files)),
            "repository": self.ui_manager.ui_data.get("repository"),
            "related_issue": self.related_issue,
            "pull_request": self.ui_manager.ui_data.get("pull_request"),
        }

    except Exception as e:
        self.status = "error"
        self.message = f"Error: {str(e)}"
        self._append_log(f"Error: {str(e)}")

        # Log error to database if available
        if self.db_manager and self.history:
            self.db_manager.log_error(
                history_id=self.history.id,
                error=str(e)
            )

        # Same shape as the success payload, plus the 'error' key.
        return {
            "status": "error",
            "progress": self.progress,
            "message": self.message,
            "current_step": self.current_step,
            "logs": self.logs,
            "error": str(e),
            "ui_data": self.ui_manager.ui_data,
            "history_id": self.history.id if self.history else None,
            "project_root": str(self.project_root),
            "changed_files": list(dict.fromkeys(self.changed_files)),
            "repository": self.ui_manager.ui_data.get("repository"),
            "related_issue": self.related_issue,
            "pull_request": self.ui_manager.ui_data.get("pull_request"),
        }
|
||||
|
||||
async def _create_project_structure(self) -> None:
    """Create the initial project scaffolding (non-code template files).

    The entrypoint and tests are skipped here; _generate_code writes them
    in the later code-generation step.
    """
    self.project_root.mkdir(parents=True, exist_ok=True)
    for relative_path, content in self._template_files().items():
        if relative_path.startswith("main.py") or relative_path.startswith("tests/"):
            continue
        self._write_file(relative_path, content)
    self._append_log(f"Project structure created under {self.project_root}.")
|
||||
|
||||
async def _generate_code(self) -> None:
    """Write the application entrypoint and smoke test from the built-in templates.

    NOTE(review): despite the step's name, no LLM/Ollama call happens here —
    only the static templates from _template_files are written.
    """
    for relative_path, content in self._template_files().items():
        if relative_path in {"main.py", "tests/test_app.py"}:
            self._write_file(relative_path, content)
    self._append_log("Application entrypoint and smoke test generated.")
|
||||
|
||||
async def _run_tests(self) -> None:
    """Validate generated code by byte-compiling it.

    Raises py_compile.PyCompileError on syntax errors. This does NOT run
    the generated test suite — it only checks the files compile.
    """
    py_compile.compile(str(self.project_root / "main.py"), doraise=True)
    py_compile.compile(str(self.project_root / "tests/test_app.py"), doraise=True)
    self._append_log("Generated Python files compiled successfully.")
|
||||
|
||||
async def _commit_to_git(self) -> None:
    """Commit generated files locally, then best-effort push and PR linkage.

    Local git failures are recorded in UI data rather than raised; a failed
    remote push downgrades the record to local-only instead of aborting.
    """
    unique_files = list(dict.fromkeys(self.changed_files))
    if not unique_files:
        return

    try:
        if not self.git_manager.has_repo():
            self.git_manager.init_repo()
        # Remembered so a compare URL can be built after the new commit.
        base_commit = self.git_manager.current_head_or_none()
        self.git_manager.add_files(unique_files)
        # Nothing staged means nothing to commit.
        if not self.git_manager.get_status():
            return

        commit_message = f"AI generation for prompt: {self.project_name}"
        commit_hash = self.git_manager.commit(commit_message)
        commit_record = {
            "hash": commit_hash,
            "message": commit_message,
            "files": unique_files,
            "timestamp": datetime.utcnow().isoformat(),
            "scope": "local",
            "branch": self.branch_name,
        }
        remote_record = None
        try:
            remote_record = await self._push_remote_commit(commit_hash, commit_message, unique_files, base_commit)
        except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError) as remote_exc:
            # Remote push is optional; keep the local commit and note the error.
            self.ui_manager.ui_data.setdefault("git", {})["remote_error"] = str(remote_exc)
            self._append_log(f"Remote git push skipped: {remote_exc}")

        if remote_record:
            commit_record["scope"] = "remote"
            commit_record["commit_url"] = remote_record.get("commit_url")
            commit_record["compare_url"] = remote_record.get("compare_url")
            if remote_record.get('pull_request'):
                commit_record['pull_request'] = remote_record['pull_request']
                self.ui_manager.ui_data['pull_request'] = remote_record['pull_request']
        self.ui_manager.ui_data.setdefault("git", {})["latest_commit"] = commit_record
        self.ui_manager.ui_data.setdefault("git", {})["commits"] = [commit_record]
        self._append_log(f"Recorded git commit {commit_hash[:12]} for generated files.")
        if self.db_manager:
            self.db_manager.log_commit(
                project_id=self.project_id,
                commit_message=commit_message,
                actor="orchestrator",
                actor_type="agent",
                history_id=self.history.id if self.history else None,
                prompt_id=self.prompt_audit.id if self.prompt_audit else None,
                commit_hash=commit_hash,
                changed_files=unique_files,
                branch=self.branch_name,
                commit_url=remote_record.get("commit_url") if remote_record else None,
                compare_url=remote_record.get("compare_url") if remote_record else None,
                remote_status=remote_record.get("status") if remote_record else "local-only",
                related_issue=self.related_issue,
            )
            if self.related_issue:
                self.db_manager.log_issue_work(
                    project_id=self.project_id,
                    history_id=self.history.id if self.history else None,
                    prompt_id=self.prompt_audit.id if self.prompt_audit else None,
                    issue=self.related_issue,
                    actor='orchestrator',
                    commit_hash=commit_hash,
                    commit_url=remote_record.get('commit_url') if remote_record else None,
                )
    except (subprocess.CalledProcessError, FileNotFoundError) as exc:
        self.ui_manager.ui_data.setdefault("git", {})["error"] = str(exc)
        self._append_log(f"Git commit skipped: {exc}")
|
||||
|
||||
async def _create_pr(self) -> None:
|
||||
"""Create pull request."""
|
||||
pass # Skip PR creation in test environment
|
||||
|
||||
def update_status(self, status: str, progress: int, message: str) -> None:
    """Overwrite the tracked status, progress percentage, and status message."""
    for attr, value in (("status", status), ("progress", progress), ("message", message)):
        setattr(self, attr, value)
|
||||
|
||||
def get_ui_data(self) -> dict:
    """Expose the UI manager's data dictionary (the live object, not a copy)."""
    return getattr(self.ui_manager, "ui_data")
|
||||
|
||||
def render_dashboard(self) -> str:
    """Delegate dashboard HTML rendering to the UI manager."""
    html = self.ui_manager.render_dashboard()
    return html
|
||||
|
||||
def get_history(self) -> Optional[dict]:
    """Fetch the full audit record for this project, or None when auditing is disabled."""
    if not (self.db_manager and self.history):
        return None
    return self.db_manager.get_project_audit_data(self.history.project_id)
|
||||
127
ai_software_factory/agents/prompt_workflow.py
Normal file
127
ai_software_factory/agents/prompt_workflow.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""Helpers for prompt-level repository workflows such as undoing a prompt."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
|
||||
try:
|
||||
from ..config import settings
|
||||
from .database_manager import DatabaseManager
|
||||
from .git_manager import GitManager
|
||||
from .gitea import GiteaAPI
|
||||
except ImportError:
|
||||
from config import settings
|
||||
from agents.database_manager import DatabaseManager
|
||||
from agents.git_manager import GitManager
|
||||
from agents.gitea import GiteaAPI
|
||||
|
||||
|
||||
class PromptWorkflowManager:
|
||||
"""Coordinate prompt-level repository actions against git and Gitea."""
|
||||
|
||||
def __init__(self, db):
    """Wire the workflow manager to the audit database and a Gitea API client.

    Args:
        db: database session/handle forwarded to DatabaseManager.
    """
    self.db_manager = DatabaseManager(db)
    # Use the lower-case settings attributes used everywhere else in this
    # module (undo_prompt reads settings.gitea_token); the original
    # upper-case names were inconsistent with the rest of the codebase.
    self.gitea_api = GiteaAPI(
        token=settings.gitea_token,
        base_url=settings.gitea_url,
        owner=settings.gitea_owner,
        repo=settings.gitea_repo or '',
    )
|
||||
|
||||
async def undo_prompt(self, project_id: str, prompt_id: int) -> dict:
    """Revert the commit associated with a prompt and push the revert to the PR branch.

    Returns a status dict: 'error' when the project/prompt/commit cannot be
    resolved or the revert fails, 'ignored' when the prompt was already
    reverted, 'success' (with commit/compare URLs when a push happened)
    otherwise. The remote push itself is best-effort.
    """
    history = self.db_manager.get_project_by_id(project_id)
    if history is None:
        return {'status': 'error', 'message': 'Project not found'}

    correlations = self.db_manager.get_prompt_change_correlations(project_id=project_id, limit=500)
    correlation = next((item for item in correlations if item.get('prompt_id') == prompt_id), None)
    if correlation is None:
        return {'status': 'error', 'message': 'Prompt not found for project'}
    # Idempotence guard: a prompt can only be undone once.
    if correlation.get('revert'):
        return {'status': 'ignored', 'message': 'Prompt has already been reverted', 'revert': correlation['revert']}

    # Pick the first commit that still stands (has a hash, not itself a revert).
    original_commit = next(
        (commit for commit in correlation.get('commits', []) if commit.get('remote_status') != 'reverted' and commit.get('commit_hash')),
        None,
    )
    if original_commit is None:
        return {'status': 'error', 'message': 'No reversible commit was recorded for this prompt'}

    branch = original_commit.get('branch') or f'ai/{project_id}'
    project_root = settings.projects_root / project_id
    git_manager = GitManager(project_id, project_dir=str(project_root))
    if not git_manager.has_repo():
        return {'status': 'error', 'message': 'Local project repository is not available for undo'}

    try:
        git_manager.checkout_branch(branch)
        previous_head = git_manager.current_head_or_none()
        revert_commit_hash = git_manager.revert_commit(original_commit['commit_hash'])
    except (subprocess.CalledProcessError, FileNotFoundError) as exc:
        return {'status': 'error', 'message': f'Unable to revert prompt commit: {exc}'}

    repository = self.db_manager.get_project_audit_data(project_id).get('repository') or {}
    commit_url = None
    compare_url = None
    # Push the revert only when a ready per-project repo and credentials exist.
    if (
        repository.get('mode') == 'project'
        and repository.get('status') in {'created', 'exists', 'ready'}
        and settings.gitea_token
        and repository.get('owner')
        and repository.get('name')
    ):
        try:
            user_info = await self.gitea_api.get_current_user()
            username = user_info.get('login') if isinstance(user_info, dict) else None
            if username and not user_info.get('error'):
                remote_url = repository.get('clone_url') or self.gitea_api.build_repo_git_url(repository.get('owner'), repository.get('name'))
                if remote_url:
                    git_manager.push_with_credentials(
                        remote_url=remote_url,
                        username=username,
                        password=settings.gitea_token,
                        branch=branch,
                    )
                    commit_url = self.gitea_api.build_commit_url(revert_commit_hash, repository.get('owner'), repository.get('name'))
                    if previous_head:
                        compare_url = self.gitea_api.build_compare_url(previous_head, revert_commit_hash, repository.get('owner'), repository.get('name'))
        except (RuntimeError, subprocess.CalledProcessError, FileNotFoundError):
            # Best-effort push: the local revert still counts on failure.
            pass

    self.db_manager.log_commit(
        project_id=project_id,
        commit_message=f'Revert prompt {prompt_id}',
        actor='dashboard',
        actor_type='user',
        history_id=history.id,
        prompt_id=prompt_id,
        commit_hash=revert_commit_hash,
        changed_files=original_commit.get('changed_files', []),
        branch=branch,
        commit_url=commit_url,
        compare_url=compare_url,
        remote_status='reverted',
    )
    self.db_manager.log_prompt_revert(
        project_id=project_id,
        prompt_id=prompt_id,
        reverted_commit_hash=original_commit['commit_hash'],
        revert_commit_hash=revert_commit_hash,
        actor='dashboard',
        commit_url=commit_url,
    )
    self.db_manager.log_system_event(
        component='git',
        level='INFO',
        message=f'Reverted prompt {prompt_id} for project {project_id}',
    )
    return {
        'status': 'success',
        'project_id': project_id,
        'prompt_id': prompt_id,
        'reverted_commit_hash': original_commit['commit_hash'],
        'revert_commit_hash': revert_commit_hash,
        'commit_url': commit_url,
        'compare_url': compare_url,
    }
|
||||
249
ai_software_factory/agents/request_interpreter.py
Normal file
249
ai_software_factory/agents/request_interpreter.py
Normal file
@@ -0,0 +1,249 @@
|
||||
"""Interpret free-form software requests into structured generation input."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
|
||||
try:
|
||||
from ..config import settings
|
||||
except ImportError:
|
||||
from config import settings
|
||||
|
||||
|
||||
class RequestInterpreter:
|
||||
"""Use Ollama to turn free-form text into a structured software request."""
|
||||
|
||||
def __init__(self, ollama_url: str | None = None, model: str | None = None):
|
||||
self.ollama_url = (ollama_url or settings.ollama_url).rstrip('/')
|
||||
self.model = model or settings.OLLAMA_MODEL
|
||||
|
||||
async def interpret(self, prompt_text: str, context: dict | None = None) -> dict:
|
||||
"""Interpret free-form text into the request shape expected by the orchestrator."""
|
||||
interpreted, _trace = await self.interpret_with_trace(prompt_text, context=context)
|
||||
return interpreted
|
||||
|
||||
async def interpret_with_trace(self, prompt_text: str, context: dict | None = None) -> tuple[dict, dict]:
|
||||
"""Interpret free-form text into the request shape expected by the orchestrator."""
|
||||
normalized = prompt_text.strip()
|
||||
if not normalized:
|
||||
raise ValueError('Prompt text cannot be empty')
|
||||
|
||||
compact_context = self._build_compact_context(context or {})
|
||||
|
||||
system_prompt = (
|
||||
'You route Telegram software prompts. '
|
||||
'Decide whether the prompt starts a new project or continues an existing tracked project. '
|
||||
'When continuing, identify the best matching project_id from the provided context and the issue number if one is mentioned or implied by recent chat history. '
|
||||
'Return only JSON with keys request and routing. '
|
||||
'request must contain name, description, features, tech_stack. '
|
||||
'routing must contain intent, project_id, project_name, issue_number, confidence, and reasoning_summary. '
|
||||
'Use the provided project catalog and recent chat history. '
|
||||
'If the user says things like also, continue, work on this, that issue, or follow-up wording, prefer continuation of the most relevant recent project. '
|
||||
'If the user explicitly asks for a new project, set intent to new_project.'
|
||||
)
|
||||
user_prompt = normalized
|
||||
if compact_context:
|
||||
user_prompt = (
|
||||
f"Conversation context:\n{json.dumps(compact_context, indent=2)}\n\n"
|
||||
f"User prompt:\n{normalized}"
|
||||
)
|
||||
|
||||
try:
|
||||
import aiohttp
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.post(
|
||||
f'{self.ollama_url}/api/chat',
|
||||
json={
|
||||
'model': self.model,
|
||||
'stream': False,
|
||||
'format': 'json',
|
||||
'messages': [
|
||||
{
|
||||
'role': 'system',
|
||||
'content': system_prompt,
|
||||
},
|
||||
{'role': 'user', 'content': user_prompt},
|
||||
],
|
||||
},
|
||||
) as resp:
|
||||
payload = await resp.json()
|
||||
if 200 <= resp.status < 300:
|
||||
content = payload.get('message', {}).get('content', '')
|
||||
if content:
|
||||
parsed = json.loads(content)
|
||||
interpreted = self._normalize_interpreted_request(parsed, normalized)
|
||||
routing = self._normalize_routing(parsed.get('routing'), interpreted, compact_context)
|
||||
return interpreted, {
|
||||
'stage': 'request_interpretation',
|
||||
'provider': 'ollama',
|
||||
'model': self.model,
|
||||
'system_prompt': system_prompt,
|
||||
'user_prompt': user_prompt,
|
||||
'assistant_response': content,
|
||||
'raw_response': payload,
|
||||
'routing': routing,
|
||||
'context_excerpt': compact_context,
|
||||
'fallback_used': False,
|
||||
}
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
interpreted, routing = self._heuristic_fallback(normalized, compact_context)
|
||||
return interpreted, {
|
||||
'stage': 'request_interpretation',
|
||||
'provider': 'heuristic',
|
||||
'model': self.model,
|
||||
'system_prompt': system_prompt,
|
||||
'user_prompt': user_prompt,
|
||||
'assistant_response': json.dumps({'request': interpreted, 'routing': routing}),
|
||||
'raw_response': {'fallback': 'heuristic'},
|
||||
'routing': routing,
|
||||
'context_excerpt': compact_context,
|
||||
'fallback_used': True,
|
||||
}
|
||||
|
||||
def _normalize_interpreted_request(self, interpreted: dict, original_prompt: str) -> dict:
|
||||
"""Normalize LLM output into the required request shape."""
|
||||
request_payload = interpreted.get('request') if isinstance(interpreted.get('request'), dict) else interpreted
|
||||
name = str(interpreted.get('name') or '').strip() or self._derive_name(original_prompt)
|
||||
if isinstance(request_payload, dict):
|
||||
name = str(request_payload.get('name') or '').strip() or self._derive_name(original_prompt)
|
||||
description = str((request_payload or {}).get('description') or '').strip() or original_prompt[:255]
|
||||
features = self._normalize_list((request_payload or {}).get('features'))
|
||||
tech_stack = self._normalize_list((request_payload or {}).get('tech_stack'))
|
||||
if not features:
|
||||
features = ['core workflow based on free-form request']
|
||||
return {
|
||||
'name': name[:255],
|
||||
'description': description[:255],
|
||||
'features': features,
|
||||
'tech_stack': tech_stack,
|
||||
}
|
||||
|
||||
def _build_compact_context(self, context: dict) -> dict:
|
||||
"""Reduce interpreter context to the fields that help routing."""
|
||||
projects = []
|
||||
for project in context.get('projects', [])[:10]:
|
||||
issues = []
|
||||
for issue in project.get('open_issues', [])[:5]:
|
||||
issues.append({'number': issue.get('number'), 'title': issue.get('title'), 'state': issue.get('state')})
|
||||
projects.append(
|
||||
{
|
||||
'project_id': project.get('project_id'),
|
||||
'name': project.get('name'),
|
||||
'description': project.get('description'),
|
||||
'repository': project.get('repository'),
|
||||
'open_pull_request': bool(project.get('open_pull_request')),
|
||||
'open_issues': issues,
|
||||
}
|
||||
)
|
||||
return {
|
||||
'chat_id': context.get('chat_id'),
|
||||
'recent_chat_history': context.get('recent_chat_history', [])[:8],
|
||||
'projects': projects,
|
||||
}
|
||||
|
||||
def _normalize_routing(self, routing: dict | None, interpreted: dict, context: dict) -> dict:
|
||||
"""Normalize routing metadata returned by the LLM."""
|
||||
routing = routing or {}
|
||||
project_id = routing.get('project_id')
|
||||
project_name = routing.get('project_name')
|
||||
issue_number = routing.get('issue_number')
|
||||
if issue_number in ('', None):
|
||||
issue_number = None
|
||||
elif isinstance(issue_number, str) and issue_number.isdigit():
|
||||
issue_number = int(issue_number)
|
||||
matched_project = None
|
||||
for project in context.get('projects', []):
|
||||
if project_id and project.get('project_id') == project_id:
|
||||
matched_project = project
|
||||
break
|
||||
if project_name and project.get('name') == project_name:
|
||||
matched_project = project
|
||||
break
|
||||
intent = str(routing.get('intent') or '').strip() or ('continue_project' if matched_project else 'new_project')
|
||||
return {
|
||||
'intent': intent,
|
||||
'project_id': matched_project.get('project_id') if matched_project else project_id,
|
||||
'project_name': matched_project.get('name') if matched_project else (project_name or interpreted.get('name')),
|
||||
'issue_number': issue_number,
|
||||
'confidence': routing.get('confidence') or ('medium' if matched_project else 'low'),
|
||||
'reasoning_summary': routing.get('reasoning_summary') or ('Matched prior project context' if matched_project else 'No strong prior project match found'),
|
||||
}
|
||||
|
||||
def _normalize_list(self, value) -> list[str]:
|
||||
if isinstance(value, list):
|
||||
return [str(item).strip() for item in value if str(item).strip()]
|
||||
if isinstance(value, str) and value.strip():
|
||||
return [item.strip() for item in value.split(',') if item.strip()]
|
||||
return []
|
||||
|
||||
def _derive_name(self, prompt_text: str) -> str:
|
||||
"""Derive a stable project name when the LLM does not provide one."""
|
||||
first_line = prompt_text.splitlines()[0].strip()
|
||||
cleaned = re.sub(r'[^A-Za-z0-9 ]+', ' ', first_line)
|
||||
words = [word.capitalize() for word in cleaned.split()[:4]]
|
||||
return ' '.join(words) or 'Generated Project'
|
||||
|
||||
def _heuristic_fallback(self, prompt_text: str, context: dict | None = None) -> tuple[dict, dict]:
|
||||
"""Fallback request extraction when Ollama is unavailable."""
|
||||
lowered = prompt_text.lower()
|
||||
tech_candidates = [
|
||||
'python', 'fastapi', 'django', 'flask', 'postgresql', 'sqlite', 'react', 'vue', 'nicegui', 'docker'
|
||||
]
|
||||
tech_stack = [candidate for candidate in tech_candidates if candidate in lowered]
|
||||
sentences = [part.strip() for part in re.split(r'[\n\.]+', prompt_text) if part.strip()]
|
||||
features = sentences[:3] or ['Implement the user request from free-form text']
|
||||
interpreted = {
|
||||
'name': self._derive_name(prompt_text),
|
||||
'description': sentences[0][:255] if sentences else prompt_text[:255],
|
||||
'features': features,
|
||||
'tech_stack': tech_stack,
|
||||
}
|
||||
routing = self._heuristic_routing(prompt_text, context or {})
|
||||
if routing.get('project_name'):
|
||||
interpreted['name'] = routing['project_name']
|
||||
return interpreted, routing
|
||||
|
||||
def _heuristic_routing(self, prompt_text: str, context: dict) -> dict:
|
||||
"""Best-effort routing when the LLM is unavailable."""
|
||||
lowered = prompt_text.lower()
|
||||
explicit_new = any(token in lowered for token in ['new project', 'start a new project', 'create a new project', 'build a new app'])
|
||||
referenced_issue = self._extract_issue_number(prompt_text)
|
||||
recent_history = context.get('recent_chat_history', [])
|
||||
projects = context.get('projects', [])
|
||||
last_project_id = recent_history[0].get('project_id') if recent_history else None
|
||||
last_issue = ((recent_history[0].get('related_issue') or {}).get('number') if recent_history else None)
|
||||
|
||||
matched_project = None
|
||||
for project in projects:
|
||||
name = (project.get('name') or '').lower()
|
||||
repo = ((project.get('repository') or {}).get('name') or '').lower()
|
||||
if name and name in lowered:
|
||||
matched_project = project
|
||||
break
|
||||
if repo and repo in lowered:
|
||||
matched_project = project
|
||||
break
|
||||
if matched_project is None and not explicit_new:
|
||||
follow_up_tokens = ['also', 'continue', 'for this project', 'for that project', 'work on this', 'work on that', 'fix that', 'add this']
|
||||
if any(token in lowered for token in follow_up_tokens) and last_project_id:
|
||||
matched_project = next((project for project in projects if project.get('project_id') == last_project_id), None)
|
||||
issue_number = referenced_issue
|
||||
if issue_number is None and any(token in lowered for token in ['that issue', 'this issue', 'the issue']) and last_issue is not None:
|
||||
issue_number = last_issue
|
||||
intent = 'new_project' if explicit_new or matched_project is None else 'continue_project'
|
||||
return {
|
||||
'intent': intent,
|
||||
'project_id': matched_project.get('project_id') if matched_project else None,
|
||||
'project_name': matched_project.get('name') if matched_project else self._derive_name(prompt_text),
|
||||
'issue_number': issue_number,
|
||||
'confidence': 'medium' if matched_project or explicit_new else 'low',
|
||||
'reasoning_summary': 'Heuristic routing from chat history and project names.',
|
||||
}
|
||||
|
||||
def _extract_issue_number(self, prompt_text: str) -> int | None:
|
||||
match = re.search(r'(?:#|issue\s+)(\d+)', prompt_text, flags=re.IGNORECASE)
|
||||
return int(match.group(1)) if match else None
|
||||
202
ai_software_factory/agents/telegram.py
Normal file
202
ai_software_factory/agents/telegram.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""Telegram bot integration for n8n webhook."""
|
||||
|
||||
import asyncio
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class TelegramHandler:
|
||||
"""Handles Telegram messages via n8n webhook."""
|
||||
|
||||
def __init__(self, webhook_url: str):
|
||||
self.webhook_url = webhook_url
|
||||
self.api_url = "https://api.telegram.org/bot"
|
||||
|
||||
def build_prompt_guide_message(self, backend_url: str | None = None) -> str:
|
||||
"""Build a Telegram message explaining the expected prompt format."""
|
||||
lines = [
|
||||
"AI Software Factory is listening in this chat.",
|
||||
"",
|
||||
"You can send free-form software requests in normal language.",
|
||||
"",
|
||||
"Example:",
|
||||
"Build an internal inventory portal for our warehouse team.",
|
||||
"It should support role-based login, stock dashboards, and purchase orders.",
|
||||
"Prefer FastAPI, PostgreSQL, and a simple web UI.",
|
||||
"",
|
||||
"The backend will interpret the request and turn it into a structured project plan.",
|
||||
]
|
||||
if backend_url:
|
||||
lines.extend(["", f"Backend target: {backend_url}"])
|
||||
return "\n".join(lines)
|
||||
|
||||
async def send_message(self, bot_token: str, chat_id: str | int, text: str) -> dict:
|
||||
"""Send a direct Telegram message using the configured bot."""
|
||||
if not bot_token:
|
||||
return {"status": "error", "message": "Telegram bot token is not configured"}
|
||||
if chat_id in (None, ""):
|
||||
return {"status": "error", "message": "Telegram chat id is not configured"}
|
||||
|
||||
api_endpoint = f"{self.api_url}{bot_token}/sendMessage"
|
||||
|
||||
try:
|
||||
import aiohttp
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.post(
|
||||
api_endpoint,
|
||||
json={
|
||||
"chat_id": str(chat_id),
|
||||
"text": text,
|
||||
},
|
||||
) as resp:
|
||||
payload = await resp.json()
|
||||
if 200 <= resp.status < 300 and payload.get("ok"):
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Telegram prompt guide sent successfully",
|
||||
"payload": payload,
|
||||
}
|
||||
description = payload.get("description") or payload.get("message") or str(payload)
|
||||
return {
|
||||
"status": "error",
|
||||
"message": f"Telegram API returned {resp.status}: {description}",
|
||||
"payload": payload,
|
||||
}
|
||||
except Exception as exc:
|
||||
return {"status": "error", "message": str(exc)}
|
||||
|
||||
async def handle_message(self, message_data: dict) -> dict:
|
||||
"""Handle incoming Telegram message."""
|
||||
text = message_data.get("text", "")
|
||||
chat_id = message_data.get("chat", {}).get("id", "")
|
||||
|
||||
# Extract software request from message
|
||||
request = self._parse_request(text)
|
||||
|
||||
if request:
|
||||
# Forward to backend API
|
||||
async def fetch_software():
|
||||
try:
|
||||
import aiohttp
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.post(
|
||||
"http://localhost:8000/generate",
|
||||
json=request
|
||||
) as resp:
|
||||
return await resp.json()
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
result = await fetch_software()
|
||||
return {
|
||||
"status": "success",
|
||||
"data": result,
|
||||
"response": self._format_response(result)
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"status": "error",
|
||||
"message": "Could not parse software request"
|
||||
}
|
||||
|
||||
def _parse_request(self, text: str) -> Optional[dict]:
|
||||
"""Parse software request from user message."""
|
||||
# Simple parser - in production, use LLM to extract
|
||||
request = {
|
||||
"name": None,
|
||||
"description": None,
|
||||
"features": []
|
||||
}
|
||||
|
||||
lines = text.split("\n")
|
||||
|
||||
# Parse name
|
||||
name_idx = -1
|
||||
for i, line in enumerate(lines):
|
||||
line_stripped = line.strip()
|
||||
if line_stripped.lower().startswith("name:"):
|
||||
request["name"] = line_stripped.split(":", 1)[1].strip()
|
||||
name_idx = i
|
||||
break
|
||||
|
||||
if not request["name"]:
|
||||
return None
|
||||
|
||||
# Parse description (everything after name until features section)
|
||||
# First, find where features section starts
|
||||
features_idx = -1
|
||||
for i in range(name_idx + 1, len(lines)):
|
||||
line_stripped = lines[i].strip()
|
||||
if line_stripped.lower().startswith("features:"):
|
||||
features_idx = i
|
||||
break
|
||||
|
||||
if features_idx > name_idx:
|
||||
# Description is between name and features
|
||||
request["description"] = "\n".join(lines[name_idx + 1:features_idx]).strip()
|
||||
else:
|
||||
# Description is everything after name
|
||||
request["description"] = "\n".join(lines[name_idx + 1:]).strip()
|
||||
|
||||
# Strip description prefix if present
|
||||
if request["description"]:
|
||||
request["description"] = request["description"].strip()
|
||||
if request["description"].lower().startswith("description:"):
|
||||
request["description"] = request["description"][len("description:") + 1:].strip()
|
||||
|
||||
# Parse features
|
||||
if features_idx > 0:
|
||||
features_line = lines[features_idx]
|
||||
# Parse inline features after "Features:"
|
||||
if ":" in features_line:
|
||||
inline_part = features_line.split(":", 1)[1].strip()
|
||||
|
||||
# Skip if it starts with dash (it's a multiline list marker)
|
||||
if inline_part and not inline_part.startswith("-"):
|
||||
# Remove any leading dashes or asterisks
|
||||
if inline_part.startswith("-"):
|
||||
inline_part = inline_part[1:].strip()
|
||||
elif inline_part.startswith("*"):
|
||||
inline_part = inline_part[1:].strip()
|
||||
|
||||
if inline_part:
|
||||
# Split by comma for inline features
|
||||
request["features"].extend([f.strip() for f in inline_part.split(",") if f.strip()])
|
||||
|
||||
# Parse multiline features (dash lines after features:)
|
||||
for line in lines[features_idx + 1:]:
|
||||
line_stripped = line.strip()
|
||||
if not line_stripped:
|
||||
continue
|
||||
if line_stripped.startswith("-"):
|
||||
feature_text = line_stripped[1:].strip()
|
||||
if feature_text:
|
||||
request["features"].append(feature_text)
|
||||
elif line_stripped.startswith("*"):
|
||||
feature_text = line_stripped[1:].strip()
|
||||
if feature_text:
|
||||
request["features"].append(feature_text)
|
||||
elif ":" in line_stripped:
|
||||
# Non-feature line with colon
|
||||
break
|
||||
|
||||
# MUST have features
|
||||
if not request["features"]:
|
||||
return None
|
||||
|
||||
return request
|
||||
|
||||
def _format_response(self, result: dict) -> dict:
|
||||
"""Format response for Telegram."""
|
||||
status = result.get("status", "error")
|
||||
message = result.get("message", result.get("detail", ""))
|
||||
progress = result.get("progress", 0)
|
||||
|
||||
response_data = {
|
||||
"status": status,
|
||||
"message": message,
|
||||
"progress": progress,
|
||||
"project_name": result.get("name", "N/A"),
|
||||
"logs": result.get("logs", [])
|
||||
}
|
||||
|
||||
return response_data
|
||||
429
ai_software_factory/agents/ui_manager.py
Normal file
429
ai_software_factory/agents/ui_manager.py
Normal file
@@ -0,0 +1,429 @@
|
||||
"""UI manager for web dashboard with audit trail display."""
|
||||
|
||||
import html
|
||||
import json
|
||||
from typing import Optional, List
|
||||
|
||||
|
||||
class UIManager:
|
||||
"""Manages UI data and updates with audit trail display."""
|
||||
|
||||
def __init__(self, project_id: str):
|
||||
"""Initialize UI manager."""
|
||||
self.project_id = project_id
|
||||
self.ui_data = {
|
||||
"project_id": project_id,
|
||||
"status": "initialized",
|
||||
"progress": 0,
|
||||
"message": "Ready",
|
||||
"snapshots": [],
|
||||
"features": []
|
||||
}
|
||||
|
||||
def update_status(self, status: str, progress: int, message: str) -> None:
|
||||
"""Update UI status."""
|
||||
self.ui_data["status"] = status
|
||||
self.ui_data["progress"] = progress
|
||||
self.ui_data["message"] = message
|
||||
|
||||
def add_snapshot(self, data: str, created_at: Optional[str] = None) -> None:
|
||||
"""Add a snapshot of UI data."""
|
||||
snapshot = {
|
||||
"data": data,
|
||||
"created_at": created_at or self._get_current_timestamp()
|
||||
}
|
||||
self.ui_data.setdefault("snapshots", []).append(snapshot)
|
||||
|
||||
def add_feature(self, feature: str) -> None:
|
||||
"""Add a feature tag."""
|
||||
self.ui_data.setdefault("features", []).append(feature)
|
||||
|
||||
def _get_current_timestamp(self) -> str:
|
||||
"""Get current timestamp in ISO format."""
|
||||
from datetime import datetime
|
||||
return datetime.now().isoformat()
|
||||
|
||||
def get_ui_data(self) -> dict:
|
||||
"""Get current UI data."""
|
||||
return self.ui_data
|
||||
|
||||
def _escape_html(self, text: str) -> str:
|
||||
"""Escape HTML special characters for safe display."""
|
||||
if text is None:
|
||||
return ""
|
||||
return html.escape(str(text), quote=True)
|
||||
|
||||
def render_dashboard(self, audit_trail: Optional[List[dict]] = None,
|
||||
actions: Optional[List[dict]] = None,
|
||||
logs: Optional[List[dict]] = None) -> str:
|
||||
"""Render dashboard HTML with audit trail and history display."""
|
||||
|
||||
# Format logs for display
|
||||
logs_html = ""
|
||||
if logs:
|
||||
for log in logs:
|
||||
level = log.get("level", "INFO")
|
||||
message = self._escape_html(log.get("message", ""))
|
||||
timestamp = self._escape_html(log.get("timestamp", ""))
|
||||
|
||||
if level == "ERROR":
|
||||
level_class = "error"
|
||||
else:
|
||||
level_class = "info"
|
||||
|
||||
logs_html += f"""
|
||||
<div class="log-item">
|
||||
<span class="timestamp">{timestamp}</span>
|
||||
<span class="log-level {level_class}">[{level}]</span>
|
||||
<span>{message}</span>
|
||||
</div>"""
|
||||
|
||||
# Format audit trail for display
|
||||
audit_html = ""
|
||||
if audit_trail:
|
||||
for audit in audit_trail:
|
||||
action = audit.get("action", "")
|
||||
actor = self._escape_html(audit.get("actor", ""))
|
||||
timestamp = self._escape_html(audit.get("timestamp", ""))
|
||||
details = self._escape_html(audit.get("details", ""))
|
||||
metadata = audit.get("metadata", {})
|
||||
action_type = audit.get("action_type", "")
|
||||
|
||||
# Color classes for action types
|
||||
action_color = action_type.lower() if action_type else "neutral"
|
||||
|
||||
audit_html += f"""
|
||||
<div class="audit-item">
|
||||
<div class="audit-header">
|
||||
<span class="audit-action {action_color}">
|
||||
{self._escape_html(action)}
|
||||
</span>
|
||||
<span class="audit-actor">{actor}</span>
|
||||
<span class="audit-time">{timestamp}</span>
|
||||
</div>
|
||||
<div class="audit-details">{details}</div>
|
||||
{f'<div class="audit-metadata">{json.dumps(metadata)}</div>' if metadata else ''}
|
||||
</div>
|
||||
"""
|
||||
|
||||
# Format actions for display
|
||||
actions_html = ""
|
||||
if actions:
|
||||
for action in actions:
|
||||
action_type = action.get("action_type", "")
|
||||
description = self._escape_html(action.get("description", ""))
|
||||
actor_name = self._escape_html(action.get("actor_name", ""))
|
||||
actor_type = action.get("actor_type", "")
|
||||
timestamp = self._escape_html(action.get("timestamp", ""))
|
||||
|
||||
actions_html += f"""
|
||||
<div class="action-item">
|
||||
<div class="action-type">{self._escape_html(action_type)}</div>
|
||||
<div class="action-description">{description}</div>
|
||||
<div class="action-actor">{actor_type}: {actor_name}</div>
|
||||
<div class="action-time">{timestamp}</div>
|
||||
</div>"""
|
||||
|
||||
# Format snapshots for display
|
||||
snapshots_html = ""
|
||||
snapshots = self.ui_data.get("snapshots", [])
|
||||
if snapshots:
|
||||
for snapshot in snapshots:
|
||||
data = snapshot.get("data", "")
|
||||
created_at = snapshot.get("created_at", "")
|
||||
snapshots_html += f"""
|
||||
<div class="snapshot-item">
|
||||
<div class="snapshot-time">{created_at}</div>
|
||||
<pre class="snapshot-data">{data}</pre>
|
||||
</div>"""
|
||||
|
||||
# Build features HTML
|
||||
features_html = ""
|
||||
features = self.ui_data.get("features", [])
|
||||
if features:
|
||||
feature_tags = []
|
||||
for feat in features:
|
||||
feature_tags.append(f'<span class="feature-tag">{self._escape_html(feat)}</span>')
|
||||
features_html = f'<div class="features">{"".join(feature_tags)}</div>'
|
||||
|
||||
# Build project header HTML
|
||||
project_id_escaped = self._escape_html(self.ui_data.get('project_id', 'Project'))
|
||||
status = self.ui_data.get('status', 'initialized')
|
||||
|
||||
# Determine empty state message
|
||||
empty_state_message = ""
|
||||
if not audit_trail and not actions and not snapshots_html:
|
||||
empty_state_message = 'No audit trail entries available'
|
||||
|
||||
return f"""<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>AI Software Factory Dashboard</title>
|
||||
<style>
|
||||
* {{ margin: 0; padding: 0; box-sizing: border-box; }}
|
||||
body {{
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
|
||||
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||
min-height: 100vh;
|
||||
padding: 2rem;
|
||||
}}
|
||||
.container {{
|
||||
max-width: 1200px;
|
||||
margin: 0 auto;
|
||||
background: white;
|
||||
border-radius: 16px;
|
||||
padding: 2rem;
|
||||
box-shadow: 0 20px 60px rgba(0,0,0,0.3);
|
||||
}}
|
||||
h1 {{
|
||||
color: #333;
|
||||
margin-bottom: 1.5rem;
|
||||
font-size: 2rem;
|
||||
}}
|
||||
h2 {{
|
||||
color: #444;
|
||||
margin: 2rem 0 1rem;
|
||||
font-size: 1.5rem;
|
||||
border-bottom: 2px solid #667eea;
|
||||
padding-bottom: 0.5rem;
|
||||
}}
|
||||
.project {{
|
||||
background: #f8f9fa;
|
||||
border-radius: 12px;
|
||||
padding: 1.5rem;
|
||||
margin-bottom: 1rem;
|
||||
}}
|
||||
.project-header {{
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 1rem;
|
||||
}}
|
||||
.project-name {{
|
||||
font-size: 1.25rem;
|
||||
font-weight: bold;
|
||||
color: #333;
|
||||
}}
|
||||
.status-badge {{
|
||||
padding: 0.5rem 1rem;
|
||||
border-radius: 20px;
|
||||
font-weight: bold;
|
||||
font-size: 0.85rem;
|
||||
}}
|
||||
.status-badge.running {{ background: #fff3cd; color: #856404; }}
|
||||
.status-badge.completed {{ background: #d4edda; color: #155724; }}
|
||||
.status-badge.error {{ background: #f8d7da; color: #721c24; }}
|
||||
.status-badge.initialized {{ background: #e2e3e5; color: #383d41; }}
|
||||
.progress-bar {{
|
||||
width: 100%;
|
||||
height: 24px;
|
||||
background: #e9ecef;
|
||||
border-radius: 12px;
|
||||
overflow: hidden;
|
||||
margin: 1rem 0;
|
||||
}}
|
||||
.progress-fill {{
|
||||
height: 100%;
|
||||
background: linear-gradient(90deg, #667eea, #764ba2);
|
||||
transition: width 0.5s ease;
|
||||
}}
|
||||
.message {{
|
||||
color: #495057;
|
||||
margin: 0.5rem 0;
|
||||
}}
|
||||
.logs {{
|
||||
background: #f8f9fa;
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
max-height: 200px;
|
||||
overflow-y: auto;
|
||||
font-family: monospace;
|
||||
font-size: 0.85rem;
|
||||
}}
|
||||
.log-item {{
|
||||
padding: 0.25rem 0;
|
||||
border-bottom: 1px solid #e9ecef;
|
||||
}}
|
||||
.log-item:last-child {{ border-bottom: none; }}
|
||||
.timestamp {{
|
||||
color: #6c757d;
|
||||
font-size: 0.8rem;
|
||||
}}
|
||||
.log-level {{
|
||||
font-weight: bold;
|
||||
margin-right: 0.5rem;
|
||||
}}
|
||||
.log-level.info {{ color: #28a745; }}
|
||||
.log-level.error {{ color: #dc3545; }}
|
||||
.features {{
|
||||
margin-top: 1rem;
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.5rem;
|
||||
}}
|
||||
.feature-tag {{
|
||||
background: #e7f3ff;
|
||||
color: #0066cc;
|
||||
padding: 0.25rem 0.75rem;
|
||||
border-radius: 12px;
|
||||
font-size: 0.85rem;
|
||||
}}
|
||||
.audit-section {{
|
||||
background: #f8f9fa;
|
||||
border-radius: 12px;
|
||||
padding: 1.5rem;
|
||||
margin-top: 1rem;
|
||||
}}
|
||||
.audit-item {{
|
||||
background: white;
|
||||
border: 1px solid #dee2e6;
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
margin-bottom: 0.5rem;
|
||||
}}
|
||||
.audit-header {{
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 0.5rem;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.5rem;
|
||||
}}
|
||||
.audit-action {{
|
||||
padding: 0.25rem 0.75rem;
|
||||
border-radius: 12px;
|
||||
font-size: 0.85rem;
|
||||
font-weight: bold;
|
||||
}}
|
||||
.audit-action.CREATE {{ background: #d4edda; color: #155724; }}
|
||||
.audit-action.UPDATE {{ background: #cce5ff; color: #004085; }}
|
||||
.audit-action.DELETE {{ background: #f8d7da; color: #721c24; }}
|
||||
.audit-action.PROMPT {{ background: #d1ecf1; color: #0c5460; }}
|
||||
.audit-action.COMMIT {{ background: #fff3cd; color: #856404; }}
|
||||
.audit-action.PR_CREATED {{ background: #d4edda; color: #155724; }}
|
||||
.audit-action.neutral {{ background: #e9ecef; color: #495057; }}
|
||||
.audit-actor {{
|
||||
background: #e9ecef;
|
||||
padding: 0.25rem 0.75rem;
|
||||
border-radius: 12px;
|
||||
font-size: 0.8rem;
|
||||
}}
|
||||
.audit-time {{
|
||||
color: #6c757d;
|
||||
font-size: 0.8rem;
|
||||
}}
|
||||
.audit-details {{
|
||||
color: #495057;
|
||||
font-size: 0.9rem;
|
||||
font-weight: bold;
|
||||
text-transform: uppercase;
|
||||
}}
|
||||
.audit-metadata {{
|
||||
background: #f1f3f5;
|
||||
padding: 0.5rem;
|
||||
border-radius: 4px;
|
||||
font-size: 0.75rem;
|
||||
font-family: monospace;
|
||||
margin-top: 0.5rem;
|
||||
max-height: 100px;
|
||||
overflow-y: auto;
|
||||
}}
|
||||
.action-item {{
|
||||
background: white;
|
||||
border: 1px solid #dee2e6;
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
margin-bottom: 0.5rem;
|
||||
}}
|
||||
.action-type {{
|
||||
font-weight: bold;
|
||||
color: #667eea;
|
||||
font-size: 0.9rem;
|
||||
}}
|
||||
.action-description {{
|
||||
color: #495057;
|
||||
margin: 0.5rem 0;
|
||||
}}
|
||||
.action-actor {{
|
||||
color: #6c757d;
|
||||
font-size: 0.8rem;
|
||||
}}
|
||||
.snapshot-section {{
|
||||
background: #f8f9fa;
|
||||
border-radius: 12px;
|
||||
padding: 1.5rem;
|
||||
margin-top: 1rem;
|
||||
}}
|
||||
.snapshot-item {{
|
||||
background: white;
|
||||
border: 1px solid #dee2e6;
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
margin-bottom: 0.5rem;
|
||||
}}
|
||||
.snapshot-time {{
|
||||
color: #6c757d;
|
||||
font-size: 0.8rem;
|
||||
margin-bottom: 0.5rem;
|
||||
}}
|
||||
.snapshot-data {{
|
||||
background: #f8f9fa;
|
||||
padding: 0.5rem;
|
||||
border-radius: 4px;
|
||||
font-family: monospace;
|
||||
font-size: 0.75rem;
|
||||
max-height: 200px;
|
||||
overflow-y: auto;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}}
|
||||
.empty-state {{
|
||||
text-align: center;
|
||||
color: #6c757d;
|
||||
padding: 2rem;
|
||||
}}
|
||||
@media (max-width: 768px) {{
|
||||
.container {{
|
||||
padding: 1rem;
|
||||
}}
|
||||
h1 {{
|
||||
font-size: 1.5rem;
|
||||
}}
|
||||
}}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<h1>AI Software Factory Dashboard</h1>
|
||||
|
||||
<div class="project">
|
||||
<div class="project-header">
|
||||
<span class="project-name">{project_id_escaped}</span>
|
||||
<span class="status-badge {status}">
|
||||
{status.upper()}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div class="progress-bar">
|
||||
<div class="progress-fill" style="width: {self.ui_data.get('progress', 0)}%;"></div>
|
||||
</div>
|
||||
|
||||
<div class="message">{self._escape_html(self.ui_data.get('message', 'No message'))}</div>
|
||||
|
||||
{f'<div class="logs" id="logs">{logs_html}</div>' if logs else '<div class="empty-state">No logs available</div>'}
|
||||
|
||||
{features_html}
|
||||
</div>
|
||||
|
||||
{f'<div class="audit-section"><h2>Audit Trail</h2>{audit_html}</div>' if audit_html else ''}
|
||||
|
||||
{f'<div class="action-section"><h2>User Actions</h2>{actions_html}</div>' if actions_html else ''}
|
||||
|
||||
{f'<div class="snapshot-section"><h2>UI Snapshots</h2>{snapshots_html}</div>' if snapshots_html else ''}
|
||||
|
||||
{empty_state_message}
|
||||
</div>
|
||||
</body>
|
||||
</html>"""
|
||||
37
ai_software_factory/alembic.ini
Normal file
37
ai_software_factory/alembic.ini
Normal file
@@ -0,0 +1,37 @@
|
||||
[alembic]
|
||||
script_location = alembic
|
||||
prepend_sys_path = .
|
||||
path_separator = os
|
||||
sqlalchemy.url = sqlite:////tmp/ai_software_factory_test.db
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers = console
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
50
ai_software_factory/alembic/env.py
Normal file
50
ai_software_factory/alembic/env.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""Alembic environment for AI Software Factory."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from logging.config import fileConfig
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
|
||||
try:
|
||||
from ai_software_factory.models import Base
|
||||
except ImportError:
|
||||
from models import Base
|
||||
|
||||
config = context.config
|
||||
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in offline mode."""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(url=url, target_metadata=target_metadata, literal_binds=True, compare_type=True)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in online mode."""
|
||||
connectable = engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(connection=connection, target_metadata=target_metadata, compare_type=True)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
17
ai_software_factory/alembic/script.py.mako
Normal file
17
ai_software_factory/alembic/script.py.mako
Normal file
@@ -0,0 +1,17 @@
|
||||
"""${message}"""
|
||||
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,164 @@
|
||||
"""initial schema
|
||||
|
||||
Revision ID: 20260410_01
|
||||
Revises:
|
||||
Create Date: 2026-04-10 00:00:00
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
revision = "20260410_01"
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"agent_actions",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("agent_name", sa.String(length=100), nullable=False),
|
||||
sa.Column("action_type", sa.String(length=100), nullable=False),
|
||||
sa.Column("success", sa.Boolean(), nullable=True),
|
||||
sa.Column("message", sa.String(length=500), nullable=True),
|
||||
sa.Column("timestamp", sa.DateTime(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"audit_trail",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("component", sa.String(length=50), nullable=True),
|
||||
sa.Column("log_level", sa.String(length=50), nullable=True),
|
||||
sa.Column("message", sa.String(length=500), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("project_id", sa.String(length=255), nullable=True),
|
||||
sa.Column("action", sa.String(length=100), nullable=True),
|
||||
sa.Column("actor", sa.String(length=100), nullable=True),
|
||||
sa.Column("action_type", sa.String(length=50), nullable=True),
|
||||
sa.Column("details", sa.Text(), nullable=True),
|
||||
sa.Column("metadata_json", sa.JSON(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"project_history",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("project_id", sa.String(length=255), nullable=False),
|
||||
sa.Column("project_name", sa.String(length=255), nullable=True),
|
||||
sa.Column("features", sa.Text(), nullable=True),
|
||||
sa.Column("description", sa.String(length=255), nullable=True),
|
||||
sa.Column("status", sa.String(length=50), nullable=True),
|
||||
sa.Column("progress", sa.Integer(), nullable=True),
|
||||
sa.Column("message", sa.String(length=500), nullable=True),
|
||||
sa.Column("current_step", sa.String(length=255), nullable=True),
|
||||
sa.Column("total_steps", sa.Integer(), nullable=True),
|
||||
sa.Column("current_step_description", sa.String(length=1024), nullable=True),
|
||||
sa.Column("current_step_details", sa.Text(), nullable=True),
|
||||
sa.Column("error_message", sa.Text(), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("started_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("updated_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("completed_at", sa.DateTime(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"system_logs",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("component", sa.String(length=50), nullable=False),
|
||||
sa.Column("log_level", sa.String(length=50), nullable=True),
|
||||
sa.Column("log_message", sa.String(length=500), nullable=False),
|
||||
sa.Column("user_agent", sa.String(length=255), nullable=True),
|
||||
sa.Column("ip_address", sa.String(length=45), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"project_logs",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("history_id", sa.Integer(), nullable=False),
|
||||
sa.Column("log_level", sa.String(length=50), nullable=True),
|
||||
sa.Column("log_message", sa.String(length=500), nullable=False),
|
||||
sa.Column("timestamp", sa.DateTime(), nullable=True),
|
||||
sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"prompt_code_links",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("history_id", sa.Integer(), nullable=False),
|
||||
sa.Column("project_id", sa.String(length=255), nullable=False),
|
||||
sa.Column("prompt_audit_id", sa.Integer(), nullable=False),
|
||||
sa.Column("code_change_audit_id", sa.Integer(), nullable=False),
|
||||
sa.Column("file_path", sa.String(length=500), nullable=True),
|
||||
sa.Column("change_type", sa.String(length=50), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||
sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"pull_request_data",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("history_id", sa.Integer(), nullable=False),
|
||||
sa.Column("pr_number", sa.Integer(), nullable=False),
|
||||
sa.Column("pr_title", sa.String(length=500), nullable=False),
|
||||
sa.Column("pr_body", sa.Text(), nullable=True),
|
||||
sa.Column("pr_state", sa.String(length=50), nullable=False),
|
||||
sa.Column("pr_url", sa.String(length=500), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||
sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"pull_requests",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("history_id", sa.Integer(), nullable=False),
|
||||
sa.Column("pr_number", sa.Integer(), nullable=False),
|
||||
sa.Column("pr_title", sa.String(length=500), nullable=False),
|
||||
sa.Column("pr_body", sa.Text(), nullable=True),
|
||||
sa.Column("base", sa.String(length=255), nullable=False),
|
||||
sa.Column("user", sa.String(length=255), nullable=False),
|
||||
sa.Column("pr_url", sa.String(length=500), nullable=False),
|
||||
sa.Column("merged", sa.Boolean(), nullable=True),
|
||||
sa.Column("merged_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("pr_state", sa.String(length=50), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||
sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"ui_snapshots",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("history_id", sa.Integer(), nullable=False),
|
||||
sa.Column("snapshot_data", sa.JSON(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||
sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_table(
|
||||
"user_actions",
|
||||
sa.Column("id", sa.Integer(), nullable=False),
|
||||
sa.Column("history_id", sa.Integer(), nullable=True),
|
||||
sa.Column("user_id", sa.String(length=100), nullable=True),
|
||||
sa.Column("action_type", sa.String(length=100), nullable=True),
|
||||
sa.Column("actor_type", sa.String(length=50), nullable=True),
|
||||
sa.Column("actor_name", sa.String(length=100), nullable=True),
|
||||
sa.Column("action_description", sa.String(length=500), nullable=True),
|
||||
sa.Column("action_data", sa.JSON(), nullable=True),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||
sa.ForeignKeyConstraint(["history_id"], ["project_history.id"]),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("user_actions")
|
||||
op.drop_table("ui_snapshots")
|
||||
op.drop_table("pull_requests")
|
||||
op.drop_table("pull_request_data")
|
||||
op.drop_table("prompt_code_links")
|
||||
op.drop_table("project_logs")
|
||||
op.drop_table("system_logs")
|
||||
op.drop_table("project_history")
|
||||
op.drop_table("audit_trail")
|
||||
op.drop_table("agent_actions")
|
||||
232
ai_software_factory/config.py
Normal file
232
ai_software_factory/config.py
Normal file
@@ -0,0 +1,232 @@
|
||||
"""Configuration settings for AI Software Factory."""
|
||||
|
||||
import os
|
||||
from typing import Optional
|
||||
from pathlib import Path
|
||||
from pydantic import Field
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
"""Application settings loaded from environment variables."""
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=".env",
|
||||
env_file_encoding="utf-8",
|
||||
extra="ignore",
|
||||
)
|
||||
|
||||
# Server settings
|
||||
HOST: str = "0.0.0.0"
|
||||
PORT: int = 8000
|
||||
LOG_LEVEL: str = "INFO"
|
||||
|
||||
# Ollama settings computed from environment
|
||||
OLLAMA_URL: str = "http://ollama:11434"
|
||||
OLLAMA_MODEL: str = "llama3"
|
||||
|
||||
# Gitea settings
|
||||
GITEA_URL: str = "https://gitea.yourserver.com"
|
||||
GITEA_TOKEN: str = ""
|
||||
GITEA_OWNER: str = "ai-software-factory"
|
||||
GITEA_REPO: str = ""
|
||||
|
||||
# n8n settings
|
||||
N8N_WEBHOOK_URL: str = ""
|
||||
N8N_API_URL: str = ""
|
||||
N8N_API_KEY: str = ""
|
||||
N8N_TELEGRAM_CREDENTIAL_NAME: str = "AI Software Factory Telegram"
|
||||
N8N_USER: str = ""
|
||||
N8N_PASSWORD: str = ""
|
||||
|
||||
# Runtime integration settings
|
||||
BACKEND_PUBLIC_URL: str = "http://localhost:8000"
|
||||
PROJECTS_ROOT: str = ""
|
||||
|
||||
# Telegram settings
|
||||
TELEGRAM_BOT_TOKEN: str = ""
|
||||
TELEGRAM_CHAT_ID: str = ""
|
||||
|
||||
# PostgreSQL settings
|
||||
POSTGRES_HOST: str = "localhost"
|
||||
POSTGRES_PORT: int = 5432
|
||||
POSTGRES_USER: str = "postgres"
|
||||
POSTGRES_PASSWORD: str = ""
|
||||
POSTGRES_DB: str = "ai_software_factory"
|
||||
POSTGRES_TEST_DB: str = "ai_software_factory_test"
|
||||
POSTGRES_URL: Optional[str] = None # Optional direct PostgreSQL connection URL
|
||||
|
||||
# SQLite settings for testing
|
||||
USE_SQLITE: bool = True # Enable SQLite by default for testing
|
||||
SQLITE_DB_PATH: str = "sqlite.db"
|
||||
|
||||
# Database connection pool settings (only for PostgreSQL)
|
||||
DB_POOL_SIZE: int = 10
|
||||
DB_MAX_OVERFLOW: int = 20
|
||||
DB_POOL_RECYCLE: int = 3600
|
||||
DB_POOL_TIMEOUT: int = 30
|
||||
|
||||
@property
|
||||
def postgres_url(self) -> str:
|
||||
"""Get PostgreSQL URL with trimmed whitespace."""
|
||||
return (self.POSTGRES_URL or "").strip()
|
||||
|
||||
@property
|
||||
def postgres_env_configured(self) -> bool:
|
||||
"""Whether PostgreSQL was explicitly configured via environment variables."""
|
||||
if self.postgres_url:
|
||||
return True
|
||||
postgres_env_keys = (
|
||||
"POSTGRES_HOST",
|
||||
"POSTGRES_PORT",
|
||||
"POSTGRES_USER",
|
||||
"POSTGRES_PASSWORD",
|
||||
"POSTGRES_DB",
|
||||
)
|
||||
return any(bool(os.environ.get(key, "").strip()) for key in postgres_env_keys)
|
||||
|
||||
@property
|
||||
def use_sqlite(self) -> bool:
|
||||
"""Whether SQLite should be used as the active database backend."""
|
||||
if not self.USE_SQLITE:
|
||||
return False
|
||||
return not self.postgres_env_configured
|
||||
|
||||
@property
|
||||
def pool(self) -> dict:
|
||||
"""Get database pool configuration."""
|
||||
return {
|
||||
"pool_size": self.DB_POOL_SIZE,
|
||||
"max_overflow": self.DB_MAX_OVERFLOW,
|
||||
"pool_recycle": self.DB_POOL_RECYCLE,
|
||||
"pool_timeout": self.DB_POOL_TIMEOUT
|
||||
}
|
||||
|
||||
@property
|
||||
def database_url(self) -> str:
|
||||
"""Get database connection URL."""
|
||||
if self.use_sqlite:
|
||||
return f"sqlite:///{self.SQLITE_DB_PATH}"
|
||||
if self.postgres_url:
|
||||
return self.postgres_url
|
||||
return (
|
||||
f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
|
||||
f"@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}"
|
||||
)
|
||||
|
||||
@property
|
||||
def test_database_url(self) -> str:
|
||||
"""Get test database connection URL."""
|
||||
if self.use_sqlite:
|
||||
return f"sqlite:///{self.SQLITE_DB_PATH}"
|
||||
if self.postgres_url:
|
||||
return self.postgres_url
|
||||
return (
|
||||
f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
|
||||
f"@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_TEST_DB}"
|
||||
)
|
||||
|
||||
@property
|
||||
def ollama_url(self) -> str:
|
||||
"""Get Ollama URL with trimmed whitespace."""
|
||||
return self.OLLAMA_URL.strip()
|
||||
|
||||
@property
|
||||
def gitea_url(self) -> str:
|
||||
"""Get Gitea URL with trimmed whitespace."""
|
||||
return self.GITEA_URL.strip()
|
||||
|
||||
@property
|
||||
def gitea_token(self) -> str:
|
||||
"""Get Gitea token with trimmed whitespace."""
|
||||
return self.GITEA_TOKEN.strip()
|
||||
|
||||
@property
|
||||
def gitea_owner(self) -> str:
|
||||
"""Get Gitea owner/organization with trimmed whitespace."""
|
||||
return self.GITEA_OWNER.strip()
|
||||
|
||||
@property
|
||||
def gitea_repo(self) -> str:
|
||||
"""Get the optional fixed Gitea repository name with trimmed whitespace."""
|
||||
return self.GITEA_REPO.strip()
|
||||
|
||||
@property
|
||||
def use_project_repositories(self) -> bool:
|
||||
"""Whether the service should create one repository per generated project."""
|
||||
return not bool(self.gitea_repo)
|
||||
|
||||
@property
|
||||
def n8n_webhook_url(self) -> str:
|
||||
"""Get n8n webhook URL with trimmed whitespace."""
|
||||
return self.N8N_WEBHOOK_URL.strip()
|
||||
|
||||
@property
|
||||
def n8n_api_url(self) -> str:
|
||||
"""Get n8n API URL with trimmed whitespace."""
|
||||
return self.N8N_API_URL.strip()
|
||||
|
||||
@property
|
||||
def n8n_api_key(self) -> str:
|
||||
"""Get n8n API key with trimmed whitespace."""
|
||||
return self.N8N_API_KEY.strip()
|
||||
|
||||
@property
|
||||
def n8n_telegram_credential_name(self) -> str:
|
||||
"""Get the preferred n8n Telegram credential name."""
|
||||
return self.N8N_TELEGRAM_CREDENTIAL_NAME.strip() or "AI Software Factory Telegram"
|
||||
|
||||
@property
|
||||
def telegram_bot_token(self) -> str:
|
||||
"""Get Telegram bot token with trimmed whitespace."""
|
||||
return self.TELEGRAM_BOT_TOKEN.strip()
|
||||
|
||||
@property
|
||||
def telegram_chat_id(self) -> str:
|
||||
"""Get Telegram chat ID with trimmed whitespace."""
|
||||
return self.TELEGRAM_CHAT_ID.strip()
|
||||
|
||||
@property
|
||||
def backend_public_url(self) -> str:
|
||||
"""Get backend public URL with trimmed whitespace."""
|
||||
return self.BACKEND_PUBLIC_URL.strip().rstrip("/")
|
||||
|
||||
@property
|
||||
def projects_root(self) -> Path:
|
||||
"""Get the root directory for generated project artifacts."""
|
||||
if self.PROJECTS_ROOT.strip():
|
||||
return Path(self.PROJECTS_ROOT).expanduser().resolve()
|
||||
return Path(__file__).resolve().parent.parent / "test-project"
|
||||
|
||||
@property
|
||||
def postgres_host(self) -> str:
|
||||
"""Get PostgreSQL host."""
|
||||
return self.POSTGRES_HOST.strip()
|
||||
|
||||
@property
|
||||
def postgres_port(self) -> int:
|
||||
"""Get PostgreSQL port as integer."""
|
||||
return int(self.POSTGRES_PORT)
|
||||
|
||||
@property
|
||||
def postgres_user(self) -> str:
|
||||
"""Get PostgreSQL user."""
|
||||
return self.POSTGRES_USER.strip()
|
||||
|
||||
@property
|
||||
def postgres_password(self) -> str:
|
||||
"""Get PostgreSQL password."""
|
||||
return self.POSTGRES_PASSWORD.strip()
|
||||
|
||||
@property
|
||||
def postgres_db(self) -> str:
|
||||
"""Get PostgreSQL database name."""
|
||||
return self.POSTGRES_DB.strip()
|
||||
|
||||
@property
|
||||
def postgres_test_db(self) -> str:
|
||||
"""Get test PostgreSQL database name."""
|
||||
return self.POSTGRES_TEST_DB.strip()
|
||||
|
||||
# Create instance for module-level access
|
||||
settings = Settings()
|
||||
322
ai_software_factory/dashboard.html
Normal file
322
ai_software_factory/dashboard.html
Normal file
@@ -0,0 +1,322 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>AI Software Factory Dashboard</title>
|
||||
<style>
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||
background: linear-gradient(135deg, #1a1a2e 0%, #16213e 100%);
|
||||
min-height: 100vh;
|
||||
color: #fff;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.dashboard {
|
||||
max-width: 1200px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.header {
|
||||
text-align: center;
|
||||
padding: 30px;
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 15px;
|
||||
margin-bottom: 20px;
|
||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
|
||||
.header h1 {
|
||||
font-size: 2.5em;
|
||||
margin-bottom: 10px;
|
||||
background: linear-gradient(90deg, #00d4ff, #00ff88);
|
||||
-webkit-background-clip: text;
|
||||
-webkit-text-fill-color: transparent;
|
||||
background-clip: text;
|
||||
}
|
||||
|
||||
.header p {
|
||||
color: #888;
|
||||
font-size: 1.1em;
|
||||
}
|
||||
|
||||
.stats-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
|
||||
gap: 20px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.stat-card {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 15px;
|
||||
padding: 25px;
|
||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.stat-card h3 {
|
||||
font-size: 0.9em;
|
||||
color: #888;
|
||||
margin-bottom: 10px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 1px;
|
||||
}
|
||||
|
||||
.stat-card .value {
|
||||
font-size: 2.5em;
|
||||
font-weight: bold;
|
||||
color: #00d4ff;
|
||||
}
|
||||
|
||||
.stat-card.project .value { color: #00ff88; }
|
||||
.stat-card.active .value { color: #ff6b6b; }
|
||||
.stat-card.code .value { color: #ffd93d; }
|
||||
|
||||
.status-panel {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 15px;
|
||||
padding: 25px;
|
||||
margin-bottom: 20px;
|
||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
|
||||
.status-panel h2 {
|
||||
font-size: 1.3em;
|
||||
margin-bottom: 15px;
|
||||
color: #00d4ff;
|
||||
}
|
||||
|
||||
.status-bar {
|
||||
height: 20px;
|
||||
background: #2a2a4a;
|
||||
border-radius: 10px;
|
||||
overflow: hidden;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.status-fill {
|
||||
height: 100%;
|
||||
background: linear-gradient(90deg, #00d4ff, #00ff88);
|
||||
border-radius: 10px;
|
||||
transition: width 0.5s ease;
|
||||
}
|
||||
|
||||
.message {
|
||||
padding: 10px;
|
||||
background: rgba(0, 212, 255, 0.1);
|
||||
border-radius: 8px;
|
||||
border-left: 4px solid #00d4ff;
|
||||
}
|
||||
|
||||
.projects-section {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 15px;
|
||||
padding: 25px;
|
||||
margin-bottom: 20px;
|
||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
|
||||
.projects-section h2 {
|
||||
font-size: 1.3em;
|
||||
margin-bottom: 15px;
|
||||
color: #00ff88;
|
||||
}
|
||||
|
||||
.projects-list {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 15px;
|
||||
}
|
||||
|
||||
.project-item {
|
||||
background: rgba(0, 255, 136, 0.1);
|
||||
padding: 15px 20px;
|
||||
border-radius: 10px;
|
||||
border: 1px solid rgba(0, 255, 136, 0.3);
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
.project-item.active {
|
||||
background: rgba(255, 107, 107, 0.1);
|
||||
border-color: rgba(255, 107, 107, 0.3);
|
||||
}
|
||||
|
||||
.audit-section {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 15px;
|
||||
padding: 25px;
|
||||
margin-bottom: 20px;
|
||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
|
||||
.audit-section h2 {
|
||||
font-size: 1.3em;
|
||||
margin-bottom: 15px;
|
||||
color: #ffd93d;
|
||||
}
|
||||
|
||||
.audit-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
.audit-table th, .audit-table td {
|
||||
padding: 12px;
|
||||
text-align: left;
|
||||
border-bottom: 1px solid rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
|
||||
.audit-table th {
|
||||
color: #888;
|
||||
font-weight: 600;
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
.audit-table td {
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
.audit-table .timestamp {
|
||||
color: #666;
|
||||
font-size: 0.8em;
|
||||
}
|
||||
|
||||
.actions-panel {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
border-radius: 15px;
|
||||
padding: 25px;
|
||||
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.actions-panel h2 {
|
||||
font-size: 1.3em;
|
||||
margin-bottom: 15px;
|
||||
color: #ff6b6b;
|
||||
}
|
||||
|
||||
.actions-panel p {
|
||||
color: #888;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.stats-grid {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
|
||||
.projects-list {
|
||||
flex-direction: column;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="dashboard">
|
||||
<div class="header">
|
||||
<h1>🚀 AI Software Factory</h1>
|
||||
<p>Real-time Dashboard & Audit Trail Display</p>
|
||||
</div>
|
||||
|
||||
<div class="stats-grid">
|
||||
<div class="stat-card project">
|
||||
<h3>Current Project</h3>
|
||||
<div class="value">test-project</div>
|
||||
</div>
|
||||
<div class="stat-card active">
|
||||
<h3>Active Projects</h3>
|
||||
<div class="value">1</div>
|
||||
</div>
|
||||
<div class="stat-card code">
|
||||
<h3>Code Generated</h3>
|
||||
<div class="value">12.4 KB</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<h3>Status</h3>
|
||||
<div class="value" id="status-value">running</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="status-panel">
|
||||
<h2>📊 Current Status</h2>
|
||||
<div class="status-bar">
|
||||
<div class="status-fill" id="status-fill" style="width: 75%"></div>
|
||||
</div>
|
||||
<div class="message">
|
||||
<strong>Generating code...</strong><br>
|
||||
<span style="color: #888;">Progress: 75%</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="projects-section">
|
||||
<h2>📁 Active Projects</h2>
|
||||
<div class="projects-list">
|
||||
<div class="project-item active">
|
||||
<strong>test-project</strong> • Agent: Orchestrator • Last update: just now
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="audit-section">
|
||||
<h2>📜 Audit Trail</h2>
|
||||
<table class="audit-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Timestamp</th>
|
||||
<th>Agent</th>
|
||||
<th>Action</th>
|
||||
<th>Status</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="timestamp">2026-03-22 01:41:00</td>
|
||||
<td>Orchestrator</td>
|
||||
<td>Initialized project</td>
|
||||
<td style="color: #00ff88;">Success</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="timestamp">2026-03-22 01:41:05</td>
|
||||
<td>Git Manager</td>
|
||||
<td>Initialized git repository</td>
|
||||
<td style="color: #00ff88;">Success</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="timestamp">2026-03-22 01:41:10</td>
|
||||
<td>Code Generator</td>
|
||||
<td>Generated main.py</td>
|
||||
<td style="color: #00ff88;">Success</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="timestamp">2026-03-22 01:41:15</td>
|
||||
<td>Code Generator</td>
|
||||
<td>Generated requirements.txt</td>
|
||||
<td style="color: #00ff88;">Success</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="timestamp">2026-03-22 01:41:18</td>
|
||||
<td>Orchestrator</td>
|
||||
<td>Running</td>
|
||||
<td style="color: #00d4ff;">In Progress</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="actions-panel">
|
||||
<h2>⚙️ System Actions</h2>
|
||||
<p>Dashboard is rendering successfully. The UI manager is active and monitoring all projects.</p>
|
||||
<p style="color: #888; font-size: 0.9em;">This dashboard is powered by the UIManager component and displays real-time status updates, audit trails, and project information.</p>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
1223
ai_software_factory/dashboard_ui.py
Normal file
1223
ai_software_factory/dashboard_ui.py
Normal file
File diff suppressed because it is too large
Load Diff
230
ai_software_factory/database.py
Normal file
230
ai_software_factory/database.py
Normal file
@@ -0,0 +1,230 @@
|
||||
"""Database connection and session management."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from pathlib import Path
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from alembic import command
|
||||
from alembic.config import Config
|
||||
from sqlalchemy import create_engine, event, text
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
try:
|
||||
from .config import settings
|
||||
from .models import Base
|
||||
except ImportError:
|
||||
from config import settings
|
||||
from models import Base
|
||||
|
||||
|
||||
def get_database_runtime_summary() -> dict[str, str]:
|
||||
"""Return a human-readable summary of the effective database backend."""
|
||||
if settings.use_sqlite:
|
||||
db_path = str(Path(settings.SQLITE_DB_PATH or "/tmp/ai_software_factory_test.db").expanduser().resolve())
|
||||
return {
|
||||
"backend": "sqlite",
|
||||
"target": db_path,
|
||||
"database": db_path,
|
||||
}
|
||||
|
||||
parsed = urlparse(settings.database_url)
|
||||
database_name = parsed.path.lstrip("/") or "unknown"
|
||||
host = parsed.hostname or "unknown-host"
|
||||
port = str(parsed.port or 5432)
|
||||
return {
|
||||
"backend": parsed.scheme.split("+", 1)[0] or "postgresql",
|
||||
"target": f"{host}:{port}/{database_name}",
|
||||
"database": database_name,
|
||||
}
|
||||
|
||||
|
||||
def get_engine() -> Engine:
|
||||
"""Create and return SQLAlchemy engine with connection pooling."""
|
||||
# Use SQLite for tests, PostgreSQL for production
|
||||
if settings.use_sqlite:
|
||||
db_path = settings.SQLITE_DB_PATH or "/tmp/ai_software_factory_test.db"
|
||||
Path(db_path).expanduser().resolve().parent.mkdir(parents=True, exist_ok=True)
|
||||
db_url = f"sqlite:///{db_path}"
|
||||
# SQLite-specific configuration - no pooling for SQLite
|
||||
engine = create_engine(
|
||||
db_url,
|
||||
connect_args={"check_same_thread": False},
|
||||
echo=settings.LOG_LEVEL == "DEBUG"
|
||||
)
|
||||
else:
|
||||
db_url = settings.database_url
|
||||
# PostgreSQL-specific configuration
|
||||
engine = create_engine(
|
||||
db_url,
|
||||
pool_size=settings.DB_POOL_SIZE or 10,
|
||||
max_overflow=settings.DB_MAX_OVERFLOW or 20,
|
||||
pool_pre_ping=settings.LOG_LEVEL == "DEBUG",
|
||||
echo=settings.LOG_LEVEL == "DEBUG",
|
||||
pool_timeout=settings.DB_POOL_TIMEOUT or 30
|
||||
)
|
||||
|
||||
# Event listener for connection checkout (PostgreSQL only)
|
||||
if not settings.use_sqlite:
|
||||
@event.listens_for(engine, "checkout")
|
||||
def receive_checkout(dbapi_connection, connection_record, connection_proxy):
|
||||
"""Log connection checkout for audit purposes."""
|
||||
if settings.LOG_LEVEL in ("DEBUG", "INFO"):
|
||||
print(f"DB Connection checked out from pool")
|
||||
|
||||
@event.listens_for(engine, "checkin")
|
||||
def receive_checkin(dbapi_connection, connection_record):
|
||||
"""Log connection checkin for audit purposes."""
|
||||
if settings.LOG_LEVEL == "DEBUG":
|
||||
print(f"DB Connection returned to pool")
|
||||
|
||||
return engine
|
||||
|
||||
|
||||
def get_session() -> Generator[Session, None, None]:
|
||||
"""Yield a managed database session."""
|
||||
engine = get_engine()
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
session = SessionLocal()
|
||||
try:
|
||||
yield session
|
||||
session.commit()
|
||||
except Exception:
|
||||
session.rollback()
|
||||
raise
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
def get_db() -> Generator[Session, None, None]:
|
||||
"""Dependency for FastAPI routes that need database access."""
|
||||
yield from get_session()
|
||||
|
||||
|
||||
def get_db_sync() -> Session:
|
||||
"""Get a database session directly (for non-FastAPI/NiceGUI usage)."""
|
||||
engine = get_engine()
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
session = SessionLocal()
|
||||
return session
|
||||
|
||||
|
||||
def get_db_session() -> Session:
|
||||
"""Get a database session directly (for non-FastAPI usage)."""
|
||||
session = next(get_session())
|
||||
return session
|
||||
|
||||
|
||||
def get_alembic_config(database_url: str | None = None) -> Config:
|
||||
"""Return an Alembic config bound to the active database URL."""
|
||||
package_root = Path(__file__).resolve().parent
|
||||
alembic_ini = package_root / "alembic.ini"
|
||||
config = Config(str(alembic_ini))
|
||||
config.set_main_option("script_location", str(package_root / "alembic"))
|
||||
config.set_main_option("sqlalchemy.url", database_url or settings.database_url)
|
||||
return config
|
||||
|
||||
|
||||
def run_migrations(database_url: str | None = None) -> dict:
|
||||
"""Apply Alembic migrations to the configured database."""
|
||||
try:
|
||||
config = get_alembic_config(database_url)
|
||||
command.upgrade(config, "head")
|
||||
return {"status": "success", "message": "Database migrations applied."}
|
||||
except Exception as exc:
|
||||
return {"status": "error", "message": str(exc)}
|
||||
|
||||
|
||||
def init_db() -> dict:
    """Initialize database tables and database if needed.

    Prefers Alembic migrations; falls back to SQLAlchemy metadata creation
    when migrations fail. Every path returns a {'status', 'message'} dict.
    """
    if settings.use_sqlite:
        result = run_migrations()
        if result["status"] == "success":
            print("SQLite database migrations applied successfully.")
            return {"status": "success", "message": "SQLite database initialized via migrations."}
        # Migrations failed: create tables straight from the ORM metadata.
        engine = get_engine()
        try:
            Base.metadata.create_all(bind=engine)
            print("SQLite database tables created successfully.")
            return {"status": "success", "message": "SQLite database initialized with metadata fallback."}
        except Exception as e:
            print(f"Error initializing SQLite database: {str(e)}")
            return {'status': 'error', 'message': f'Error: {str(e)}'}
    else:
        # PostgreSQL
        db_url = settings.database_url
        # The last URL path segment is assumed to be the database name.
        db_name = db_url.split('/')[-1] if '/' in db_url else 'ai_software_factory'

        try:
            # Create engine to check/create database
            engine = create_engine(db_url)

            # Try to create database if it doesn't exist
            try:
                with engine.connect() as conn:
                    # Check if database exists
                    # NOTE(review): this probes db_name as if it were a *table*
                    # ("SELECT 1 FROM <db_name>"), not a database; it only
                    # succeeds when a table of the same name happens to exist.
                    # Confirm intent — a pg_database catalog lookup may have
                    # been what was meant.
                    result = conn.execute(text(f"SELECT 1 FROM {db_name} WHERE 1=0"))
                    # If no error, database exists
                    conn.commit()
                    print(f"PostgreSQL database '{db_name}' already exists.")
            except Exception as e:
                # Database doesn't exist or has different error - try to create it
                error_msg = str(e).lower()
                # Only create if it's a relation does not exist error or similar
                if "does not exist" in error_msg or "database" in error_msg:
                    try:
                        # NOTE(review): PostgreSQL rejects CREATE DATABASE inside
                        # a transaction block, and this connection is not in
                        # autocommit mode — verify this path actually works.
                        conn = engine.connect()
                        conn.execute(text(f"CREATE DATABASE {db_name}"))
                        conn.commit()
                        print(f"PostgreSQL database '{db_name}' created.")
                    except Exception as db_error:
                        print(f"Could not create database: {str(db_error)}")
                        # Try to connect anyway - maybe using existing db name
                        engine = create_engine(db_url.replace(f'/{db_name}', '/postgres'))
                        with engine.connect() as conn:
                            # Just create tables in postgres database for now
                            print(f"Using existing 'postgres' database.")

            # Apply migrations against the (possibly just created) database.
            migration_result = run_migrations(db_url)
            if migration_result["status"] == "success":
                print(f"PostgreSQL database '{db_name}' migrations applied successfully.")
                return {'status': 'success', 'message': f'PostgreSQL database "{db_name}" initialized via migrations.'}

            # Migration failure: fall back to metadata-based table creation.
            Base.metadata.create_all(bind=engine)
            print(f"PostgreSQL database '{db_name}' tables created successfully.")
            return {'status': 'success', 'message': f'PostgreSQL database "{db_name}" initialized with metadata fallback.'}

        except Exception as e:
            print(f"Error initializing PostgreSQL database: {str(e)}")
            return {'status': 'error', 'message': f'Error: {str(e)}'}
|
||||
|
||||
|
||||
def drop_db() -> dict:
    """Drop all database tables (use with caution!).

    Returns a {'status', 'message'} dict describing the outcome for the
    active backend (SQLite or PostgreSQL).
    """
    if settings.use_sqlite:
        engine = get_engine()
        try:
            Base.metadata.drop_all(bind=engine)
            print("SQLite database tables dropped successfully.")
            return {'status': 'success', 'message': 'SQLite tables dropped.'}
        except Exception as e:
            print(f"Error dropping SQLite tables: {str(e)}")
            return {'status': 'error', 'message': str(e)}
    else:
        db_url = settings.database_url
        # Database name (last URL segment) is only used for log/report text.
        db_name = db_url.split('/')[-1] if '/' in db_url else 'ai_software_factory'

        try:
            Base.metadata.drop_all(bind=create_engine(db_url))
            print(f"PostgreSQL database '{db_name}' tables dropped successfully.")
            return {'status': 'success', 'message': f'PostgreSQL "{db_name}" tables dropped.'}
        except Exception as e:
            print(f"Error dropping PostgreSQL tables: {str(e)}")
            return {'status': 'error', 'message': str(e)}
|
||||
|
||||
|
||||
def create_migration_script() -> str:
    """Point callers at the Alembic-managed migration scripts.

    Schema changes are handled by Alembic; this returns an informational
    string rather than generating anything.
    """
    return 'See ai_software_factory/alembic/versions for managed schema migrations.'
|
||||
53
ai_software_factory/frontend.py
Normal file
53
ai_software_factory/frontend.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""Frontend module for NiceGUI with FastAPI integration.
|
||||
|
||||
This module provides the NiceGUI frontend that can be initialized with a FastAPI app.
|
||||
The dashboard shown is from dashboard_ui.py with real-time database data.
|
||||
"""
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.responses import RedirectResponse
|
||||
|
||||
from nicegui import app, ui
|
||||
|
||||
try:
|
||||
from .dashboard_ui import create_dashboard, create_health_page
|
||||
except ImportError:
|
||||
from dashboard_ui import create_dashboard, create_health_page
|
||||
|
||||
|
||||
def init(fastapi_app: FastAPI, storage_secret: str = 'Secr2t!') -> None:
    """Initialize the NiceGUI frontend with the FastAPI app.

    Registers the dashboard pages ('/', '/show'), the health page
    ('/health-ui'), a '/dashboard' redirect on the FastAPI app, and then
    mounts NiceGUI onto the FastAPI instance via ui.run_with().

    NOTE(review): the default ``storage_secret`` is a hardcoded placeholder;
    pass a real secret in production deployments.

    Args:
        fastapi_app: The FastAPI application instance.
        storage_secret: Optional secret for persistent user storage.
    """

    def render_dashboard_page() -> None:
        # Shared body for '/' and '/show': title + live dashboard widgets.
        ui.page_title('AI Software Factory')
        create_dashboard()

        # NOTE dark mode will be persistent for each user across tabs and server restarts
        ui.dark_mode().bind_value(app.storage.user, 'dark_mode')
        ui.checkbox('dark mode').bind_value(app.storage.user, 'dark_mode')

    @ui.page('/')
    def home() -> None:
        render_dashboard_page()

    @ui.page('/show')
    def show() -> None:
        render_dashboard_page()

    @ui.page('/health-ui')
    def health_ui() -> None:
        create_health_page()

    # Plain FastAPI route (hidden from the OpenAPI schema) that forwards
    # '/dashboard' to the NiceGUI root page with a 307 redirect.
    @fastapi_app.get('/dashboard', include_in_schema=False)
    def dashboard_redirect() -> RedirectResponse:
        return RedirectResponse(url='/', status_code=307)

    ui.run_with(
        fastapi_app,
        storage_secret=storage_secret,  # NOTE setting a secret is optional but allows for persistent storage per user
    )
|
||||
724
ai_software_factory/main.py
Normal file
724
ai_software_factory/main.py
Normal file
@@ -0,0 +1,724 @@
|
||||
#!/usr/bin/env python3
|
||||
"""AI Software Factory - Main application with FastAPI backend and NiceGUI frontend.
|
||||
|
||||
This application uses FastAPI to:
|
||||
1. Provide HTTP API endpoints
|
||||
2. Host NiceGUI frontend via ui.run_with()
|
||||
|
||||
The NiceGUI frontend provides:
|
||||
1. Interactive dashboard at /
|
||||
2. Real-time data visualization
|
||||
3. Audit trail display
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
import json
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Annotated
|
||||
from uuid import uuid4
|
||||
|
||||
from fastapi import Depends, FastAPI, HTTPException, Query
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
try:
|
||||
from . import __version__, frontend
|
||||
from . import database as database_module
|
||||
from .agents.change_summary import ChangeSummaryGenerator
|
||||
from .agents.database_manager import DatabaseManager
|
||||
from .agents.request_interpreter import RequestInterpreter
|
||||
from .agents.orchestrator import AgentOrchestrator
|
||||
from .agents.n8n_setup import N8NSetupAgent
|
||||
from .agents.prompt_workflow import PromptWorkflowManager
|
||||
from .agents.ui_manager import UIManager
|
||||
from .models import ProjectHistory, ProjectLog, SystemLog
|
||||
except ImportError:
|
||||
import frontend
|
||||
import database as database_module
|
||||
from agents.change_summary import ChangeSummaryGenerator
|
||||
from agents.database_manager import DatabaseManager
|
||||
from agents.request_interpreter import RequestInterpreter
|
||||
from agents.orchestrator import AgentOrchestrator
|
||||
from agents.n8n_setup import N8NSetupAgent
|
||||
from agents.prompt_workflow import PromptWorkflowManager
|
||||
from agents.ui_manager import UIManager
|
||||
from models import ProjectHistory, ProjectLog, SystemLog
|
||||
|
||||
__version__ = "0.0.1"
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(_app: FastAPI):
    """Log the resolved database configuration at startup, then run the app."""
    summary = database_module.get_database_runtime_summary()
    backend = summary['backend']
    target = summary['target']
    print(f"Runtime configuration: database_backend={backend} target={target}")
    yield
|
||||
|
||||
|
||||
# FastAPI application; `lifespan` prints runtime configuration at startup.
app = FastAPI(lifespan=lifespan)

# Dependency alias: injects a managed SQLAlchemy session per request
# via database_module.get_db.
DbSession = Annotated[Session, Depends(database_module.get_db)]
# Matches runs of characters that are not lowercase letters or digits;
# used by _build_project_id() to slugify project names.
PROJECT_ID_PATTERN = re.compile(r"[^a-z0-9]+")
|
||||
|
||||
|
||||
class SoftwareRequest(BaseModel):
    """Request body for software generation."""

    # Human-readable project name; also feeds _build_project_id() slugging.
    name: str = Field(min_length=1, max_length=255)
    # Short description of what to build (capped at 255 characters).
    description: str = Field(min_length=1, max_length=255)
    # Requested feature descriptions; empty list means unspecified.
    features: list[str] = Field(default_factory=list)
    # Technology choices (frameworks, languages); empty list means unspecified.
    tech_stack: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class N8NSetupRequest(BaseModel):
    """Request body for n8n workflow provisioning."""

    # Explicit n8n API URL; None presumably falls back to settings — see
    # _resolve_n8n_api_url() for URL resolution.
    api_url: str | None = None
    # n8n API key override; None means use the configured credential (TODO confirm).
    api_key: str | None = None
    # Webhook path segment for the provisioned workflow.
    webhook_path: str = "telegram"
    # Backend URL the workflow calls back into; None → derived elsewhere (TODO confirm).
    backend_url: str | None = None
    # When True, update an existing workflow instead of leaving it untouched.
    force_update: bool = False
|
||||
|
||||
|
||||
class FreeformSoftwareRequest(BaseModel):
    """Request body for free-form software generation."""

    # Raw natural-language request; interpreted by RequestInterpreter.
    prompt_text: str = Field(min_length=1)
    # Originating channel; 'telegram' enables chat-id filtering in /generate/text.
    source: str = 'telegram'
    # Originating chat id; compared against settings.telegram_chat_id.
    chat_id: str | None = None
    # Chat type as reported by the source; recorded in the response 'source' block.
    chat_type: str | None = None
|
||||
|
||||
|
||||
class GiteaRepositoryOnboardRequest(BaseModel):
    """Request body for onboarding a manually created Gitea repository."""

    # Repository name within the owner/organization.
    repo_name: str = Field(min_length=1, max_length=255)
    # Repository owner; None falls back to the configured Gitea owner.
    owner: str | None = None
    # Whether to import recent commit activity immediately after onboarding.
    sync_commits: bool = True
    # Maximum number of commits to import when sync_commits is True.
    commit_limit: int = Field(default=25, ge=1, le=200)
|
||||
|
||||
|
||||
def _build_project_id(name: str) -> str:
    """Derive a URL-safe slug from ``name`` plus an 8-char random hex suffix.

    Empty/unsluggable names fall back to the literal slug 'project'.
    """
    normalized = name.strip().lower()
    slug = PROJECT_ID_PATTERN.sub("-", normalized).strip("-") or "project"
    suffix = uuid4().hex[:8]
    return f"{slug}-{suffix}"
|
||||
|
||||
|
||||
def _serialize_project(history: ProjectHistory) -> dict:
|
||||
"""Serialize a project history row for API responses."""
|
||||
return {
|
||||
"history_id": history.id,
|
||||
"project_id": history.project_id,
|
||||
"name": history.project_name,
|
||||
"description": history.description,
|
||||
"status": history.status,
|
||||
"progress": history.progress,
|
||||
"message": history.message,
|
||||
"current_step": history.current_step,
|
||||
"error_message": history.error_message,
|
||||
"created_at": history.created_at.isoformat() if history.created_at else None,
|
||||
"updated_at": history.updated_at.isoformat() if history.updated_at else None,
|
||||
"completed_at": history.completed_at.isoformat() if history.completed_at else None,
|
||||
}
|
||||
|
||||
|
||||
def _serialize_project_log(log: ProjectLog) -> dict:
|
||||
"""Serialize a project log row."""
|
||||
return {
|
||||
"id": log.id,
|
||||
"history_id": log.history_id,
|
||||
"level": log.log_level,
|
||||
"message": log.log_message,
|
||||
"timestamp": log.timestamp.isoformat() if log.timestamp else None,
|
||||
}
|
||||
|
||||
|
||||
def _serialize_system_log(log: SystemLog) -> dict:
|
||||
"""Serialize a system log row."""
|
||||
return {
|
||||
"id": log.id,
|
||||
"component": log.component,
|
||||
"level": log.log_level,
|
||||
"message": log.log_message,
|
||||
"user_agent": log.user_agent,
|
||||
"ip_address": log.ip_address,
|
||||
"timestamp": log.created_at.isoformat() if log.created_at else None,
|
||||
}
|
||||
|
||||
|
||||
def _serialize_audit_item(item: dict) -> dict:
    """Return audit-shaped dictionaries unchanged for API output.

    Identity passthrough: audit rows are already JSON-serializable dicts,
    and no copy is made (callers receive the same object).
    """
    return item
|
||||
|
||||
|
||||
def _compose_prompt_text(request: SoftwareRequest) -> str:
|
||||
"""Render the originating software request into a stable prompt string."""
|
||||
features = ", ".join(request.features) if request.features else "None"
|
||||
tech_stack = ", ".join(request.tech_stack) if request.tech_stack else "None"
|
||||
return (
|
||||
f"Name: {request.name}\n"
|
||||
f"Description: {request.description}\n"
|
||||
f"Features: {features}\n"
|
||||
f"Tech Stack: {tech_stack}"
|
||||
)
|
||||
|
||||
|
||||
async def _run_generation(
    request: SoftwareRequest,
    db: Session,
    prompt_text: str | None = None,
    prompt_actor: str = 'api',
    prompt_source_context: dict | None = None,
    prompt_routing: dict | None = None,
    preferred_project_id: str | None = None,
    related_issue: dict | None = None,
) -> dict:
    """Run the shared generation pipeline for a structured request.

    Steps: ensure the DB exists, find a reusable project history (preferred
    id first, then latest by name), sync PR state from Gitea when configured,
    run the AgentOrchestrator, then assemble the API response including logs,
    repository/PR info, and an LLM-generated change summary.

    Returns a dict with 'status', 'data' (serialized project + extras), and
    'summary_message'.
    """
    database_module.init_db()

    manager = DatabaseManager(db)
    # Reuse an existing project when a preferred id is given, else the latest
    # project recorded under the same name.
    reusable_history = manager.get_project_by_id(preferred_project_id) if preferred_project_id else manager.get_latest_project_by_name(request.name)
    if reusable_history and database_module.settings.gitea_url and database_module.settings.gitea_token:
        try:
            from .agents.gitea import GiteaAPI
        except ImportError:
            from agents.gitea import GiteaAPI
        # NOTE(review): configuration is *checked* via lowercase attributes but
        # *read* via uppercase ones below — confirm the settings object really
        # exposes both spellings.
        manager.sync_pull_request_states(
            GiteaAPI(
                token=database_module.settings.GITEA_TOKEN,
                base_url=database_module.settings.GITEA_URL,
                owner=database_module.settings.GITEA_OWNER,
                repo=database_module.settings.GITEA_REPO or '',
            ),
            project_id=reusable_history.project_id,
        )
    # Decide which project id to run under: explicit preference wins, then a
    # same-name project with an open PR; otherwise mint a fresh id.
    if preferred_project_id and reusable_history is not None:
        project_id = reusable_history.project_id
    elif reusable_history and manager.get_open_pull_request(project_id=reusable_history.project_id):
        project_id = reusable_history.project_id
    else:
        project_id = _build_project_id(request.name)
        reusable_history = None
    resolved_prompt_text = prompt_text or _compose_prompt_text(request)
    orchestrator = AgentOrchestrator(
        project_id=project_id,
        project_name=request.name,
        description=request.description,
        features=request.features,
        tech_stack=request.tech_stack,
        db=db,
        prompt_text=resolved_prompt_text,
        prompt_actor=prompt_actor,
        existing_history=reusable_history,
        prompt_source_context=prompt_source_context,
        prompt_routing=prompt_routing,
        related_issue_hint=related_issue,
    )
    result = await orchestrator.run()

    # Fresh manager after the (potentially long) orchestrator run.
    manager = DatabaseManager(db)
    manager.log_system_event(
        component='api',
        level='INFO' if result['status'] == 'completed' else 'ERROR',
        message=f"Generated project {project_id} with {len(result.get('changed_files', []))} artifact(s)",
    )

    # Assemble the response payload from the stored history plus run results.
    history = manager.get_project_by_id(project_id)
    project_logs = manager.get_project_logs(history.id)
    response_data = _serialize_project(history)
    response_data['logs'] = [_serialize_project_log(log) for log in project_logs]
    response_data['ui_data'] = result.get('ui_data')
    response_data['features'] = request.features
    response_data['tech_stack'] = request.tech_stack
    response_data['project_root'] = result.get('project_root', str(_project_root(project_id)))
    response_data['changed_files'] = result.get('changed_files', [])
    response_data['repository'] = result.get('repository')
    response_data['related_issue'] = result.get('related_issue') or (result.get('ui_data') or {}).get('related_issue')
    response_data['pull_request'] = result.get('pull_request') or manager.get_open_pull_request(project_id=project_id)
    # Context handed to the summary LLM; repository URL is only exposed when
    # the repo reached a usable state.
    summary_context = {
        'name': response_data['name'],
        'description': response_data['description'],
        'features': response_data['features'],
        'tech_stack': response_data['tech_stack'],
        'changed_files': response_data['changed_files'],
        'repository_url': (
            (response_data.get('repository') or {}).get('url')
            if isinstance(response_data.get('repository'), dict)
            and (response_data.get('repository') or {}).get('status') in {'created', 'exists', 'ready', 'shared'}
            else None
        ),
        'repository_status': (response_data.get('repository') or {}).get('status') if isinstance(response_data.get('repository'), dict) else None,
        'pull_request_url': (response_data.get('pull_request') or {}).get('pr_url') if isinstance(response_data.get('pull_request'), dict) else None,
        'pull_request_state': (response_data.get('pull_request') or {}).get('pr_state') if isinstance(response_data.get('pull_request'), dict) else None,
        'related_issue': response_data.get('related_issue'),
        'message': response_data.get('message'),
        'logs': [log.get('message', '') for log in response_data.get('logs', []) if isinstance(log, dict)],
    }
    summary_message, summary_trace = await ChangeSummaryGenerator().summarize_with_trace(summary_context)
    # Persist the summary LLM call when the orchestrator recorded its audit rows.
    if orchestrator.db_manager and orchestrator.history and orchestrator.prompt_audit:
        orchestrator.db_manager.log_llm_trace(
            project_id=project_id,
            history_id=orchestrator.history.id,
            prompt_id=orchestrator.prompt_audit.id,
            stage=summary_trace['stage'],
            provider=summary_trace['provider'],
            model=summary_trace['model'],
            system_prompt=summary_trace['system_prompt'],
            user_prompt=summary_trace['user_prompt'],
            assistant_response=summary_trace['assistant_response'],
            raw_response=summary_trace.get('raw_response'),
            fallback_used=summary_trace.get('fallback_used', False),
        )
    response_data['summary_message'] = summary_message
    # NOTE(review): 'pull_request' is recomputed here, duplicating the earlier
    # assignment — presumably to pick up state changes made during the summary
    # step; confirm whether both are needed.
    response_data['pull_request'] = result.get('pull_request') or manager.get_open_pull_request(project_id=project_id)
    return {'status': result['status'], 'data': response_data, 'summary_message': summary_message}
|
||||
|
||||
|
||||
def _project_root(project_id: str) -> Path:
    """Resolve the filesystem location for a generated project."""
    base = database_module.settings.projects_root
    return base / project_id
|
||||
|
||||
|
||||
def _create_gitea_api():
    """Build a configured Gitea client, or raise HTTP 400 when unconfigured."""
    cfg = database_module.settings
    if not (cfg.gitea_url and cfg.gitea_token):
        raise HTTPException(status_code=400, detail='Gitea integration is not configured')
    try:
        from .agents.gitea import GiteaAPI
    except ImportError:
        from agents.gitea import GiteaAPI
    # NOTE(review): configured-ness is checked via lowercase attributes but the
    # client is built from uppercase ones; confirm settings exposes both.
    return GiteaAPI(
        token=cfg.GITEA_TOKEN,
        base_url=cfg.GITEA_URL,
        owner=cfg.GITEA_OWNER,
        repo=cfg.GITEA_REPO or '',
    )
|
||||
|
||||
|
||||
def _resolve_n8n_api_url(explicit_url: str | None = None) -> str:
    """Resolve the effective n8n API URL.

    Precedence: explicit argument, configured API URL, then the base of the
    configured webhook URL. Returns '' when nothing is configured.
    """
    candidate = (explicit_url or "").strip()
    if candidate:
        return candidate
    cfg = database_module.settings
    if cfg.n8n_api_url:
        return cfg.n8n_api_url
    webhook = cfg.n8n_webhook_url
    if webhook:
        # Strip everything from '/webhook' onward plus any trailing slash.
        return webhook.split("/webhook", 1)[0].rstrip("/")
    return ""
|
||||
|
||||
|
||||
@app.get('/api')
def read_api_info():
    """Return service metadata (name, version, known endpoints) for API clients."""
    endpoints = [
        '/',
        '/api',
        '/health',
        '/generate',
        '/generate/text',
        '/projects',
        '/status/{project_id}',
        '/audit/projects',
        '/audit/logs',
        '/audit/system/logs',
        '/audit/prompts',
        '/audit/changes',
        '/audit/issues',
        '/audit/commit-context',
        '/audit/timeline',
        '/audit/llm-traces',
        '/audit/pull-requests',
        '/audit/lineage',
        '/audit/correlations',
        '/projects/{project_id}/prompts/{prompt_id}/undo',
        '/projects/{project_id}/sync-repository',
        '/gitea/repos',
        '/gitea/repos/onboard',
        '/n8n/health',
        '/n8n/setup',
    ]
    return {
        'service': 'AI Software Factory',
        'version': __version__,
        'endpoints': endpoints,
    }
|
||||
|
||||
|
||||
@app.get('/health')
def health_check():
    """Health probe exposing the active database backend and target."""
    runtime = database_module.get_database_runtime_summary()
    report = {'status': 'healthy'}
    report['database'] = runtime['backend']
    report['database_target'] = runtime['target']
    report['database_name'] = runtime['database']
    return report
|
||||
|
||||
|
||||
@app.post('/generate')
async def generate_software(request: SoftwareRequest, db: DbSession):
    """Create and record a software-generation request.

    Thin wrapper over _run_generation() with default prompt metadata:
    prompt text composed from the structured request, actor 'api'.
    """
    return await _run_generation(request, db)
|
||||
|
||||
|
||||
@app.post('/generate/text')
async def generate_software_from_text(request: FreeformSoftwareRequest, db: DbSession):
    """Interpret a free-form request and run generation.

    Pipeline: filter unauthorized Telegram chats, interpret the prompt into
    a structured request (with routing hints), run the shared generation
    pipeline, then persist the interpretation LLM trace and attach
    interpretation/routing/source metadata to the response.
    """
    # Drop Telegram traffic from any chat other than the configured one.
    if (
        request.source == 'telegram'
        and database_module.settings.telegram_chat_id
        and request.chat_id
        and str(request.chat_id) != str(database_module.settings.telegram_chat_id)
    ):
        return {
            'status': 'ignored',
            'message': f"Ignoring Telegram message from chat {request.chat_id}",
            'source': {
                'type': request.source,
                'chat_id': request.chat_id,
                'chat_type': request.chat_type,
            },
        }

    manager = DatabaseManager(db)
    interpreter_context = manager.get_interpreter_context(chat_id=request.chat_id, source=request.source)
    interpreted, interpretation_trace = await RequestInterpreter().interpret_with_trace(
        request.prompt_text,
        context=interpreter_context,
    )
    routing = interpretation_trace.get('routing') or {}
    # When routing points at an existing project (and the intent is not a new
    # project), reuse that project's recorded name/description.
    selected_history = manager.get_project_by_id(routing.get('project_id')) if routing.get('project_id') else None
    if selected_history is not None and routing.get('intent') != 'new_project':
        interpreted['name'] = selected_history.project_name
        interpreted['description'] = selected_history.description or interpreted['description']
    structured_request = SoftwareRequest(**interpreted)
    response = await _run_generation(
        structured_request,
        db,
        prompt_text=request.prompt_text,
        prompt_actor=request.source,
        prompt_source_context={
            'chat_id': request.chat_id,
            'chat_type': request.chat_type,
        },
        prompt_routing=routing,
        preferred_project_id=routing.get('project_id') if routing.get('intent') != 'new_project' else None,
        related_issue={'number': routing.get('issue_number')} if routing.get('issue_number') is not None else None,
    )
    # Persist the interpretation LLM trace against the generated project, tied
    # to the most recent prompt event when one exists.
    project_data = response.get('data', {})
    if project_data.get('history_id') is not None:
        manager = DatabaseManager(db)
        prompts = manager.get_prompt_events(project_id=project_data.get('project_id'))
        prompt_id = prompts[0]['id'] if prompts else None
        manager.log_llm_trace(
            project_id=project_data.get('project_id'),
            history_id=project_data.get('history_id'),
            prompt_id=prompt_id,
            stage=interpretation_trace['stage'],
            provider=interpretation_trace['provider'],
            model=interpretation_trace['model'],
            system_prompt=interpretation_trace['system_prompt'],
            user_prompt=interpretation_trace['user_prompt'],
            assistant_response=interpretation_trace['assistant_response'],
            raw_response=interpretation_trace.get('raw_response'),
            fallback_used=interpretation_trace.get('fallback_used', False),
        )
    # Enrich the generation response with interpretation metadata for clients.
    response['interpreted_request'] = interpreted
    response['routing'] = routing
    response['llm_trace'] = interpretation_trace
    response['source'] = {
        'type': request.source,
        'chat_id': request.chat_id,
        'chat_type': request.chat_type,
    }
    return response
|
||||
|
||||
|
||||
@app.get('/projects')
def list_projects(db: DbSession):
    """Return every recorded project, serialized for API clients."""
    rows = DatabaseManager(db).get_all_projects()
    return {'projects': [_serialize_project(row) for row in rows]}
|
||||
|
||||
|
||||
@app.get('/status/{project_id}')
def get_project_status(project_id: str, db: DbSession):
    """Look up one project's current status; 404 when unknown."""
    record = DatabaseManager(db).get_project_by_id(project_id)
    if record is None:
        raise HTTPException(status_code=404, detail='Project not found')
    return _serialize_project(record)
|
||||
|
||||
|
||||
@app.get('/audit/projects')
def get_audit_projects(db: DbSession):
    """Return projects enriched with their logs, actions, and audit trail."""
    manager = DatabaseManager(db)
    enriched = []
    for history in manager.get_all_projects():
        entry = _serialize_project(history)
        audit = manager.get_project_audit_data(history.project_id)
        entry.update(
            logs=audit['logs'],
            actions=audit['actions'],
            audit_trail=audit['audit_trail'],
        )
        enriched.append(entry)
    return {'projects': enriched}
|
||||
|
||||
|
||||
@app.get('/audit/prompts')
def get_prompt_audit(db: DbSession, project_id: str | None = Query(default=None)):
    """Return stored prompt submissions, optionally scoped to one project."""
    events = DatabaseManager(db).get_prompt_events(project_id=project_id)
    return {'prompts': [_serialize_audit_item(event) for event in events]}
|
||||
|
||||
|
||||
@app.get('/audit/changes')
def get_code_change_audit(db: DbSession, project_id: str | None = Query(default=None)):
    """Return recorded code changes, optionally scoped to one project."""
    changes = DatabaseManager(db).get_code_changes(project_id=project_id)
    return {'changes': [_serialize_audit_item(change) for change in changes]}
|
||||
|
||||
|
||||
@app.get('/audit/issues')
def get_issue_audit(
    db: DbSession,
    project_id: str | None = Query(default=None),
    state: str | None = Query(default=None),
):
    """Return tracked repository issues plus issue-work events."""
    manager = DatabaseManager(db)
    issues = manager.get_repository_issues(project_id=project_id, state=state)
    work_events = manager.get_issue_work_events(project_id=project_id)
    return {'issues': issues, 'issue_work': work_events}
|
||||
|
||||
|
||||
@app.get('/audit/commit-context')
def get_commit_context_audit(
    db: DbSession,
    commit_hash: str = Query(min_length=4),
    project_id: str | None = Query(default=None),
    branch_scope: str | None = Query(default=None, pattern='^(main|pr|manual)?$'),
):
    """Explain how a commit came to be; 404 when no context was recorded."""
    found = DatabaseManager(db).get_commit_context(
        commit_hash=commit_hash,
        project_id=project_id,
        branch_scope=branch_scope,
    )
    if found is None:
        raise HTTPException(status_code=404, detail='Commit context not found')
    return found
|
||||
|
||||
|
||||
@app.get('/audit/timeline')
def get_project_timeline_audit(
    db: DbSession,
    project_id: str = Query(min_length=1),
    branch_scope: str | None = Query(default=None, pattern='^(main|pr|manual)?$'),
):
    """Return the mixed audit timeline for one project."""
    events = DatabaseManager(db).get_project_timeline(project_id=project_id, branch_scope=branch_scope)
    return {'timeline': events}
|
||||
|
||||
|
||||
@app.get('/audit/llm-traces')
def get_llm_trace_audit(
    db: DbSession,
    project_id: str | None = Query(default=None),
    prompt_id: int | None = Query(default=None),
    stage: str | None = Query(default=None),
    model: str | None = Query(default=None),
    search: str | None = Query(default=None),
):
    """Return persisted LLM traces matching the optional filters."""
    traces = DatabaseManager(db).get_llm_traces(
        project_id=project_id,
        prompt_id=prompt_id,
        stage=stage,
        model=model,
        search_query=search,
    )
    return {'llm_traces': traces}
|
||||
|
||||
|
||||
@app.get('/audit/lineage')
def get_prompt_change_lineage(db: DbSession, project_id: str | None = Query(default=None)):
    """Return explicit prompt-to-code lineage rows."""
    rows = DatabaseManager(db).get_prompt_change_links(project_id=project_id)
    return {'lineage': rows}
|
||||
|
||||
|
||||
@app.get('/audit/correlations')
def get_prompt_change_correlations(db: DbSession, project_id: str | None = Query(default=None)):
    """Return prompt-to-change correlations for generated projects."""
    rows = DatabaseManager(db).get_prompt_change_correlations(project_id=project_id)
    return {'correlations': rows}
|
||||
|
||||
|
||||
@app.get('/audit/pull-requests')
def get_pull_request_audit(db: DbSession, project_id: str | None = Query(default=None), open_only: bool = Query(default=False)):
    """Return tracked pull requests, optionally restricted to open ones."""
    rows = DatabaseManager(db).get_pull_requests(project_id=project_id, only_open=open_only)
    return {'pull_requests': rows}
|
||||
|
||||
|
||||
@app.post('/projects/{project_id}/prompts/{prompt_id}/undo')
async def undo_prompt_changes(project_id: str, prompt_id: int, db: DbSession):
    """Undo all changes associated with a specific prompt; 400 on failure."""
    outcome = await PromptWorkflowManager(db).undo_prompt(project_id=project_id, prompt_id=prompt_id)
    if outcome.get('status') == 'error':
        raise HTTPException(status_code=400, detail=outcome.get('message', 'Undo failed'))
    return outcome
|
||||
|
||||
|
||||
@app.post('/projects/{project_id}/sync-repository')
def sync_project_repository(project_id: str, db: DbSession, commit_limit: int = Query(default=25, ge=1, le=200)):
    """Import recent repository activity from Gitea for a tracked project."""
    manager = DatabaseManager(db)
    client = _create_gitea_api()
    outcome = manager.sync_repository_activity(project_id=project_id, gitea_api=client, commit_limit=commit_limit)
    if outcome.get('status') == 'error':
        raise HTTPException(status_code=400, detail=outcome.get('message', 'Repository sync failed'))
    # Refresh the open-issue snapshot after a successful activity import.
    manager.sync_repository_issues(project_id=project_id, gitea_api=client, state='open')
    return outcome
|
||||
|
||||
|
||||
@app.get('/gitea/repos')
def list_gitea_repositories(db: DbSession, owner: str | None = Query(default=None)):
    """List repositories in the configured Gitea organization and whether they are already onboarded."""
    gitea_api = _create_gitea_api()
    resolved_owner = owner or database_module.settings.gitea_owner
    repos = gitea_api.list_repositories_sync(owner=resolved_owner)
    if isinstance(repos, dict) and repos.get('error'):
        raise HTTPException(status_code=502, detail=repos.get('error'))
    manager = DatabaseManager(db)

    def describe(repo: dict) -> dict:
        # A repo counts as onboarded when a tracked project references it.
        tracked = manager.get_project_by_repository(resolved_owner, repo.get('name', ''))
        return {
            'name': repo.get('name'),
            'full_name': repo.get('full_name') or f"{resolved_owner}/{repo.get('name')}",
            'description': repo.get('description'),
            'html_url': repo.get('html_url'),
            'clone_url': repo.get('clone_url'),
            'default_branch': repo.get('default_branch'),
            'private': bool(repo.get('private', False)),
            'onboarded': tracked is not None,
            'project_id': tracked.project_id if tracked is not None else None,
        }

    repo_list = repos if isinstance(repos, list) else []
    return {'repositories': [describe(repo) for repo in repo_list]}
|
||||
|
||||
|
||||
@app.post('/gitea/repos/onboard')
async def onboard_gitea_repository(request: GiteaRepositoryOnboardRequest, db: DbSession):
    """Onboard a manually created Gitea repository into the factory dashboard."""
    gitea_api = _create_gitea_api()
    repo_owner = request.owner or database_module.settings.gitea_owner
    repo_info = await gitea_api.get_repo_info(owner=repo_owner, repo=request.repo_name)
    if isinstance(repo_info, dict) and repo_info.get('error'):
        # Repository lookup failed — treat as not found.
        raise HTTPException(status_code=404, detail=repo_info.get('error'))

    manager = DatabaseManager(db)
    record = manager.onboard_repository(owner=repo_owner, repo_name=request.repo_name, repository_data=repo_info)
    # Pull open issues immediately so the dashboard is populated right away.
    manager.sync_repository_issues(project_id=record['project_id'], gitea_api=gitea_api, state='open')

    commit_sync = None
    if request.sync_commits:
        commit_sync = manager.sync_repository_activity(
            project_id=record['project_id'],
            gitea_api=gitea_api,
            commit_limit=request.commit_limit,
        )
    return {
        'status': 'success',
        'onboarded': record,
        'sync_result': commit_sync,
    }
|
||||
|
||||
|
||||
@app.get('/audit/logs')
def get_audit_logs(db: DbSession):
    """Return all project logs ordered newest first."""
    newest_first = db.query(ProjectLog).order_by(ProjectLog.id.desc())
    serialized = [_serialize_project_log(entry) for entry in newest_first.all()]
    return {'logs': serialized}
|
||||
|
||||
|
||||
@app.get('/audit/system/logs')
def get_system_audit_logs(
    db: DbSession,
    component: str | None = Query(default=None),
):
    """Return system logs with optional component filtering."""
    log_query = db.query(SystemLog).order_by(SystemLog.id.desc())
    if component:
        # Narrow to a single component when the filter is provided.
        log_query = log_query.filter(SystemLog.component == component)
    serialized = [_serialize_system_log(entry) for entry in log_query.all()]
    return {'logs': serialized}
|
||||
|
||||
|
||||
@app.get('/n8n/health')
async def get_n8n_health():
    """Check whether the configured n8n instance is reachable."""
    api_url = _resolve_n8n_api_url()
    if api_url:
        agent = N8NSetupAgent(api_url=api_url, webhook_token=database_module.settings.n8n_api_key)
        return await agent.health_check()
    # No base URL configured: return a descriptive error payload instead of probing.
    return {
        'status': 'error',
        'message': 'N8N_API_URL or N8N_WEBHOOK_URL is not configured.',
        'api_url': '',
        'auth_configured': bool(database_module.settings.n8n_api_key),
        'checks': [],
        'suggestion': 'Set N8N_API_URL to the base n8n address before provisioning workflows.',
    }
|
||||
|
||||
|
||||
@app.post('/n8n/setup')
async def setup_n8n_workflow(request: N8NSetupRequest, db: DbSession):
    """Create or update the n8n Telegram workflow."""
    api_url = _resolve_n8n_api_url(request.api_url)
    if not api_url:
        raise HTTPException(status_code=400, detail='n8n API URL is not configured')

    cfg = database_module.settings
    # Request-supplied key takes precedence over the configured one.
    provisioner = N8NSetupAgent(
        api_url=api_url,
        webhook_token=(request.api_key or cfg.n8n_api_key),
    )
    default_backend = f"{cfg.backend_public_url}/generate/text"
    outcome = await provisioner.setup(
        webhook_path=request.webhook_path,
        backend_url=request.backend_url or default_backend,
        force_update=request.force_update,
        telegram_bot_token=cfg.telegram_bot_token,
        telegram_credential_name=cfg.n8n_telegram_credential_name,
    )

    # Mirror the provisioning outcome into the system log.
    if outcome.get('status') != 'error':
        level = 'INFO'
    else:
        level = 'ERROR'
    DatabaseManager(db).log_system_event(
        component='n8n',
        level=level,
        message=outcome.get('message', json.dumps(outcome)),
    )
    return outcome
|
||||
|
||||
|
||||
@app.post('/init-db')
def initialize_database():
    """Initialize database tables (POST endpoint for NiceGUI to call before dashboard)."""
    try:
        database_module.init_db()
    except Exception as e:
        # Report the failure in the payload rather than as an HTTP error.
        return {'message': f'Error initializing database: {str(e)}', 'status': 'error'}
    return {'message': 'Database tables created successfully', 'status': 'success'}
|
||||
|
||||
|
||||
# Attach the NiceGUI frontend to the FastAPI app at import time.
frontend.init(app)

if __name__ == '__main__':
    # This module is meant to be served by uvicorn; direct execution only
    # prints a usage hint (see start.sh).
    print('Please start the app with the "uvicorn" command as shown in the start.sh script')
|
||||
192
ai_software_factory/models.py
Normal file
192
ai_software_factory/models.py
Normal file
@@ -0,0 +1,192 @@
|
||||
"""Database models for AI Software Factory."""
|
||||
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
import logging
|
||||
|
||||
from sqlalchemy import (
|
||||
Column, Integer, String, Text, Boolean, ForeignKey, DateTime, JSON
|
||||
)
|
||||
from sqlalchemy.orm import relationship, declarative_base
|
||||
|
||||
try:
|
||||
from .config import settings
|
||||
except ImportError:
|
||||
from config import settings
|
||||
|
||||
Base = declarative_base()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProjectStatus(str, Enum):
    """Project status enumeration."""
    # str mixin makes members compare equal to their string value,
    # so they serialize cleanly to JSON and into String columns.
    INITIALIZED = "initialized"
    STARTED = "started"
    RUNNING = "running"
    COMPLETED = "completed"
    ERROR = "error"
|
||||
|
||||
|
||||
class ProjectHistory(Base):
    """Main project tracking table."""
    __tablename__ = "project_history"

    id = Column(Integer, primary_key=True)
    # External project identifier used by the API layer (no unique constraint here).
    project_id = Column(String(255), nullable=False)
    project_name = Column(String(255), nullable=True)
    features = Column(Text, default="")
    description = Column(String(255), default="")
    # Lifecycle state; values presumably follow ProjectStatus — TODO confirm writers.
    status = Column(String(50), default='started')
    # Progress indicator; assumed 0-100 percentage — TODO confirm range with writers.
    progress = Column(Integer, default=0)
    message = Column(String(500), default="")
    current_step = Column(String(255), nullable=True)
    total_steps = Column(Integer, nullable=True)
    current_step_description = Column(String(1024), nullable=True)
    current_step_details = Column(Text, nullable=True)
    error_message = Column(Text, nullable=True)
    # Timestamps are naive datetimes produced by datetime.utcnow at insert time.
    created_at = Column(DateTime, default=datetime.utcnow)
    started_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    completed_at = Column(DateTime, nullable=True)

    # Relationships — child rows are deleted when the project row is deleted
    # (cascade="all, delete-orphan").
    project_logs = relationship("ProjectLog", back_populates="project_history", cascade="all, delete-orphan")
    ui_snapshots = relationship("UISnapshot", back_populates="project_history", cascade="all, delete-orphan")
    pull_requests = relationship("PullRequest", back_populates="project_history", cascade="all, delete-orphan")
    pull_request_data = relationship("PullRequestData", back_populates="project_history", cascade="all, delete-orphan")
    prompt_code_links = relationship("PromptCodeLink", back_populates="project_history", cascade="all, delete-orphan")
|
||||
|
||||
|
||||
class ProjectLog(Base):
    """Detailed log entries for projects."""
    __tablename__ = "project_logs"

    id = Column(Integer, primary_key=True)
    # Owning project row (project_history.id).
    history_id = Column(Integer, ForeignKey("project_history.id"), nullable=False)
    log_level = Column(String(50), default="INFO")  # INFO, WARNING, ERROR
    log_message = Column(String(500), nullable=False)
    # No default — callers are expected to supply the timestamp; TODO confirm.
    timestamp = Column(DateTime, nullable=True)

    project_history = relationship("ProjectHistory", back_populates="project_logs")
|
||||
|
||||
|
||||
class UISnapshot(Base):
    """UI snapshots for projects."""
    __tablename__ = "ui_snapshots"

    id = Column(Integer, primary_key=True)
    # Owning project row (project_history.id).
    history_id = Column(Integer, ForeignKey("project_history.id"), nullable=False)
    # Arbitrary JSON payload describing the captured UI state.
    snapshot_data = Column(JSON, nullable=False)
    created_at = Column(DateTime, default=datetime.utcnow)  # naive UTC

    project_history = relationship("ProjectHistory", back_populates="ui_snapshots")
|
||||
|
||||
|
||||
class PullRequest(Base):
    """Pull request data for projects."""
    __tablename__ = "pull_requests"

    id = Column(Integer, primary_key=True)
    # Owning project row (project_history.id).
    history_id = Column(Integer, ForeignKey("project_history.id"), nullable=False)
    # PR number as assigned by the forge (per-repository, not globally unique).
    pr_number = Column(Integer, nullable=False)
    pr_title = Column(String(500), nullable=False)
    pr_body = Column(Text)
    # Target branch of the PR.
    base = Column(String(255), nullable=False)
    # Author username.
    user = Column(String(255), nullable=False)
    pr_url = Column(String(500), nullable=False)
    merged = Column(Boolean, default=False)
    merged_at = Column(DateTime, nullable=True)
    # Forge-reported state string (e.g. open/closed) — TODO confirm exact values.
    pr_state = Column(String(50), nullable=False)
    created_at = Column(DateTime, default=datetime.utcnow)

    project_history = relationship("ProjectHistory", back_populates="pull_requests")
|
||||
|
||||
|
||||
class PullRequestData(Base):
    """Pull request data for audit API.

    NOTE(review): near-duplicate of PullRequest (minus merge/author fields),
    kept separately for the audit endpoints — consider consolidating.
    """
    __tablename__ = "pull_request_data"

    id = Column(Integer, primary_key=True)
    # Owning project row (project_history.id).
    history_id = Column(Integer, ForeignKey("project_history.id"), nullable=False)
    pr_number = Column(Integer, nullable=False)
    pr_title = Column(String(500), nullable=False)
    pr_body = Column(Text)
    pr_state = Column(String(50), nullable=False)
    pr_url = Column(String(500), nullable=False)
    created_at = Column(DateTime, default=datetime.utcnow)

    project_history = relationship("ProjectHistory", back_populates="pull_request_data")
|
||||
|
||||
|
||||
class SystemLog(Base):
    """System-wide log entries."""
    __tablename__ = "system_logs"

    id = Column(Integer, primary_key=True)
    # Emitting subsystem (e.g. 'n8n'); used as a filter in the audit API.
    component = Column(String(50), nullable=False)
    log_level = Column(String(50), default="INFO")
    log_message = Column(String(500), nullable=False)
    # Optional request metadata for web-originated events.
    user_agent = Column(String(255), nullable=True)
    ip_address = Column(String(45), nullable=True)  # 45 chars fits IPv6 text form
    created_at = Column(DateTime, default=datetime.utcnow)
|
||||
|
||||
|
||||
class AuditTrail(Base):
    """Audit trail entries for system-wide logging."""
    __tablename__ = "audit_trail"

    id = Column(Integer, primary_key=True)
    component = Column(String(50), nullable=True)
    log_level = Column(String(50), default="INFO")
    message = Column(String(500), nullable=False)
    created_at = Column(DateTime, default=datetime.utcnow)
    # Loose reference to a project by its external id (no ForeignKey).
    project_id = Column(String(255), nullable=True)
    action = Column(String(100), nullable=True)
    # Who performed the action; semantics of actor vs action_type not shown
    # here — TODO confirm against writers.
    actor = Column(String(100), nullable=True)
    action_type = Column(String(50), nullable=True)
    details = Column(Text, nullable=True)
    # Free-form structured payload accompanying the entry.
    metadata_json = Column(JSON, nullable=True)
|
||||
|
||||
|
||||
class PromptCodeLink(Base):
    """Explicit lineage between a prompt event and a resulting code change."""
    __tablename__ = "prompt_code_links"

    id = Column(Integer, primary_key=True)
    # Owning project row (project_history.id).
    history_id = Column(Integer, ForeignKey("project_history.id"), nullable=False)
    project_id = Column(String(255), nullable=False)
    # Ids of the two linked audit entries; presumably audit_trail.id values,
    # though no ForeignKey enforces it — TODO confirm.
    prompt_audit_id = Column(Integer, nullable=False)
    code_change_audit_id = Column(Integer, nullable=False)
    file_path = Column(String(500), nullable=True)
    change_type = Column(String(50), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow)

    project_history = relationship("ProjectHistory", back_populates="prompt_code_links")
|
||||
|
||||
|
||||
class UserAction(Base):
    """User action audit entries."""
    __tablename__ = "user_actions"

    id = Column(Integer, primary_key=True)
    # Optional link to a project; nullable so global actions can be recorded too.
    history_id = Column(Integer, ForeignKey("project_history.id"), nullable=True)
    user_id = Column(String(100), nullable=True)
    action_type = Column(String(100), nullable=True)
    actor_type = Column(String(50), nullable=True)
    actor_name = Column(String(100), nullable=True)
    action_description = Column(String(500), nullable=True)
    # Structured payload describing the action.
    action_data = Column(JSON, nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow)
|
||||
|
||||
|
||||
class AgentAction(Base):
    """Agent action audit entries."""
    __tablename__ = "agent_actions"

    id = Column(Integer, primary_key=True)
    # Name of the agent performing the action (e.g. a setup or codegen agent).
    agent_name = Column(String(100), nullable=False)
    action_type = Column(String(100), nullable=False)
    success = Column(Boolean, default=True)
    message = Column(String(500), nullable=True)
    timestamp = Column(DateTime, default=datetime.utcnow)  # naive UTC
|
||||
10
ai_software_factory/pytest.ini
Normal file
10
ai_software_factory/pytest.ini
Normal file
@@ -0,0 +1,10 @@
|
||||
[pytest]
|
||||
testpaths = tests
|
||||
pythonpath = .
|
||||
addopts = -v --tb=short
|
||||
filterwarnings =
|
||||
ignore::DeprecationWarning
|
||||
|
||||
asyncio_mode = auto
|
||||
asyncio_default_fixture_loop_scope = function
|
||||
asyncio_default_test_loop_scope = function
|
||||
21
ai_software_factory/requirements.txt
Normal file
21
ai_software_factory/requirements.txt
Normal file
@@ -0,0 +1,21 @@
|
||||
fastapi>=0.135.3
|
||||
uvicorn[standard]==0.27.0
|
||||
sqlalchemy==2.0.25
|
||||
psycopg2-binary==2.9.9
|
||||
pydantic==2.12.5
|
||||
pydantic-settings==2.1.0
|
||||
python-multipart==0.0.22
|
||||
aiofiles==23.2.1
|
||||
python-telegram-bot==20.7
|
||||
requests==2.31.0
|
||||
pytest==7.4.3
|
||||
pytest-cov==4.1.0
|
||||
black==23.12.1
|
||||
isort==5.13.2
|
||||
flake8==6.1.0
|
||||
mypy==1.7.1
|
||||
httpx==0.25.2
|
||||
nicegui==3.9.0
|
||||
aiohttp>=3.9.0
|
||||
pytest-asyncio>=0.23.0
|
||||
alembic>=1.14.0
|
||||
17
ai_software_factory/start.sh
Normal file
17
ai_software_factory/start.sh
Normal file
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env bash
# Launch the app with uvicorn in either production or development mode.
# Usage: start.sh prod|dev

# use path of this example as working directory; enables starting this script from anywhere
cd "$(dirname "$0")" || exit 1   # abort instead of running uvicorn from the wrong dir

# ${1:-} avoids an unbound-variable error when no mode is given.
if [ "${1:-}" = "prod" ]; then
  echo "Starting Uvicorn server in production mode..."
  # we also use a single worker in production mode so socket.io connections are always handled by the same worker
  uvicorn main:app --workers 1 --log-level info --port 80
elif [ "${1:-}" = "dev" ]; then
  echo "Starting Uvicorn server in development mode..."
  # reload implies workers = 1
  uvicorn main:app --reload --log-level debug --port 8000
else
  # Diagnostics go to stderr so they don't pollute captured stdout.
  echo "Invalid parameter. Use 'prod' or 'dev'." >&2
  exit 1
fi
|
||||
7
ai_software_factory/test/README.md
Normal file
7
ai_software_factory/test/README.md
Normal file
@@ -0,0 +1,7 @@
|
||||
# Test
|
||||
|
||||
Test
|
||||
|
||||
## Features
|
||||
|
||||
## Tech Stack
|
||||
2
ai_software_factory/test/main.py
Normal file
2
ai_software_factory/test/main.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# Generated by AI Software Factory
|
||||
print('Hello, World!')
|
||||
400
ai_software_factory/testslogger.py
Normal file
400
ai_software_factory/testslogger.py
Normal file
@@ -0,0 +1,400 @@
|
||||
"""Test logging utility for validating agent responses and system outputs."""
|
||||
|
||||
import re
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime
|
||||
|
||||
# Color codes for terminal output
|
||||
# Color codes for terminal output
class Colors:
    """ANSI escape sequences for colored terminal output."""
    GREEN = '\033[92m'
    RED = '\033[91m'
    YELLOW = '\033[93m'
    BLUE = '\033[94m'
    CYAN = '\033[96m'
    RESET = '\033[0m'


class TestLogger:
    """Utility class for logging test results and assertions.

    Accumulates three collections:
      * ``assertions`` -- one record dict per ``assert_*`` call, with keys
        ``type``, ``result`` ('pass'/'fail'/'error'), assertion-specific
        fields, and ``message``.
      * ``errors``     -- one record per error-level log event.
      * ``logs``       -- every formatted line that was printed.

    Usable as a context manager (exceptions are never suppressed).
    """

    def __init__(self):
        self.assertions: List[Dict[str, Any]] = []
        self.errors: List[Dict[str, Any]] = []
        self.logs: List[str] = []

    # ------------------------------------------------------------------
    # internal helpers (shared by all logging / assertion methods)
    # ------------------------------------------------------------------

    @staticmethod
    def _timestamp() -> str:
        """Current wall-clock time, formatted for log prefixes."""
        return datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    def _emit(self, formatted: str) -> None:
        """Remember and print one already-formatted log line."""
        self.logs.append(formatted)
        print(formatted)

    def _record(self, kind: str, result: str, message: str, **fields: Any) -> bool:
        """Append an assertion record; return True iff ``result`` is 'pass'."""
        entry: Dict[str, Any] = {'type': kind, 'result': result}
        entry.update(fields)
        entry['message'] = message
        self.assertions.append(entry)
        return result == 'pass'

    # ------------------------------------------------------------------
    # logging
    # ------------------------------------------------------------------

    def log(self, message: str, level: str = 'INFO') -> None:
        """Log an informational message."""
        self._emit(f"[{self._timestamp()}] [{level}] {message}")

    def success(self, message: str) -> None:
        """Log a success message with green color."""
        self._emit(f"{Colors.GREEN}[{self._timestamp()}] [✓ PASS] {message}{Colors.RESET}")

    def error(self, message: str) -> None:
        """Log an error message with red color and record it in ``errors``."""
        self._emit(f"{Colors.RED}[{self._timestamp()}] [✗ ERROR] {message}{Colors.RESET}")
        # Fix: ``errors`` was declared and exposed via get_errors() but never
        # populated; record error events so callers can inspect them.
        self.errors.append({'message': message, 'timestamp': self._timestamp()})

    def warning(self, message: str) -> None:
        """Log a warning message with yellow color."""
        self._emit(f"{Colors.YELLOW}[{self._timestamp()}] [!] WARN {message}{Colors.RESET}")

    def info(self, message: str) -> None:
        """Log an info message with blue color."""
        self._emit(f"{Colors.BLUE}[{self._timestamp()}] [ℹ INFO] {message}{Colors.RESET}")

    # ------------------------------------------------------------------
    # assertions
    # ------------------------------------------------------------------

    def assert_contains(self, text: str, expected: str, message: str = '') -> bool:
        """Assert that text contains expected substring."""
        try:
            if expected in text:
                self.success(f"✓ '{expected}' found in text")
                return self._record('assert_contains', 'pass',
                                    message or f"'{expected}' in text", expected=expected)
            self.error(f"✗ Expected '{expected}' not found in text")
            return self._record('assert_contains', 'fail',
                                message or f"'{expected}' in text", expected=expected)
        except Exception as e:
            self.error(f"Assertion failed with exception: {e}")
            return self._record('assert_contains', 'error',
                                message or f"Assertion failed: {e}", expected=expected)

    def assert_not_contains(self, text: str, unexpected: str, message: str = '') -> bool:
        """Assert that text does not contain expected substring."""
        try:
            if unexpected not in text:
                self.success(f"✓ '{unexpected}' not found in text")
                return self._record('assert_not_contains', 'pass',
                                    message or f"'{unexpected}' not in text", unexpected=unexpected)
            self.error(f"✗ Unexpected '{unexpected}' found in text")
            return self._record('assert_not_contains', 'fail',
                                message or f"'{unexpected}' not in text", unexpected=unexpected)
        except Exception as e:
            self.error(f"Assertion failed with exception: {e}")
            return self._record('assert_not_contains', 'error',
                                message or f"Assertion failed: {e}", unexpected=unexpected)

    def assert_equal(self, actual: str, expected: str, message: str = '') -> bool:
        """Assert that two strings are equal."""
        try:
            if actual == expected:
                self.success("✓ Strings equal")
                return self._record('assert_equal', 'pass',
                                    message or "actual == expected", expected=expected)
            self.error(f"✗ Strings not equal. Expected: '{expected}', Got: '{actual}'")
            return self._record('assert_equal', 'fail',
                                message or "actual == expected",
                                expected=expected, actual=actual)
        except Exception as e:
            self.error(f"Assertion failed with exception: {e}")
            return self._record('assert_equal', 'error',
                                message or f"Assertion failed: {e}",
                                expected=expected, actual=actual)

    def assert_starts_with(self, text: str, prefix: str, message: str = '') -> bool:
        """Assert that text starts with expected prefix."""
        try:
            if text.startswith(prefix):
                self.success(f"✓ Text starts with '{prefix}'")
                return self._record('assert_starts_with', 'pass',
                                    message or f"text starts with '{prefix}'", prefix=prefix)
            self.error(f"✗ Text does not start with '{prefix}'")
            return self._record('assert_starts_with', 'fail',
                                message or f"text starts with '{prefix}'", prefix=prefix)
        except Exception as e:
            self.error(f"Assertion failed with exception: {e}")
            return self._record('assert_starts_with', 'error',
                                message or f"Assertion failed: {e}", prefix=prefix)

    def assert_ends_with(self, text: str, suffix: str, message: str = '') -> bool:
        """Assert that text ends with expected suffix."""
        try:
            if text.endswith(suffix):
                self.success(f"✓ Text ends with '{suffix}'")
                return self._record('assert_ends_with', 'pass',
                                    message or f"text ends with '{suffix}'", suffix=suffix)
            self.error(f"✗ Text does not end with '{suffix}'")
            return self._record('assert_ends_with', 'fail',
                                message or f"text ends with '{suffix}'", suffix=suffix)
        except Exception as e:
            self.error(f"Assertion failed with exception: {e}")
            return self._record('assert_ends_with', 'error',
                                message or f"Assertion failed: {e}", suffix=suffix)

    def assert_regex(self, text: str, pattern: str, message: str = '') -> bool:
        """Assert that text matches a regex pattern (via re.search)."""
        try:
            if re.search(pattern, text):
                self.success("✓ Regex pattern matched")
                return self._record('assert_regex', 'pass',
                                    message or f"text matches regex '{pattern}'", pattern=pattern)
            self.error("✗ Regex pattern did not match")
            return self._record('assert_regex', 'fail',
                                message or f"text matches regex '{pattern}'", pattern=pattern)
        except re.error as e:
            # A malformed pattern is recorded as 'error', not 'fail'.
            self.error(f"✗ Invalid regex pattern: {e}")
            return self._record('assert_regex', 'error',
                                message or f"Invalid regex: {e}", pattern=pattern)
        except Exception as ex:
            self.error(f"Assertion failed with exception: {ex}")
            return self._record('assert_regex', 'error',
                                message or f"Assertion failed: {ex}", pattern=pattern)

    def assert_length(self, text: str, expected_length: int, message: str = '') -> bool:
        """Assert that text has expected length."""
        try:
            actual_length = len(text)
            if actual_length == expected_length:
                self.success(f"✓ Length is {expected_length}")
                return self._record('assert_length', 'pass',
                                    message or f"len(text) == {expected_length}",
                                    expected_length=expected_length)
            self.error(f"✗ Length is {actual_length}, expected {expected_length}")
            return self._record('assert_length', 'fail',
                                message or f"len(text) == {expected_length}",
                                expected_length=expected_length, actual_length=actual_length)
        except Exception as e:
            self.error(f"Assertion failed with exception: {e}")
            return self._record('assert_length', 'error',
                                message or f"Assertion failed: {e}",
                                expected_length=expected_length)

    def assert_key_exists(self, text: str, key: str, message: str = '') -> bool:
        """Assert that a key exists in a JSON-like text.

        NOTE(review): this is a substring heuristic ('"key":' / "'key':"),
        not a JSON parse; keys with whitespace before the colon are missed.
        """
        try:
            if f'"{key}":' in text or f"'{key}':" in text:
                self.success(f"✓ Key '{key}' exists")
                return self._record('assert_key_exists', 'pass',
                                    message or f"key '{key}' exists", key=key)
            self.error(f"✗ Key '{key}' not found")
            return self._record('assert_key_exists', 'fail',
                                message or f"key '{key}' exists", key=key)
        except Exception as e:
            self.error(f"Assertion failed with exception: {e}")
            return self._record('assert_key_exists', 'error',
                                message or f"Assertion failed: {e}", key=key)

    def assert_substring_count(self, text: str, substring: str, count: int, message: str = '') -> bool:
        """Assert that substring appears count times in text."""
        try:
            actual_count = text.count(substring)
            if actual_count == count:
                self.success(f"✓ Substring appears {count} time(s)")
                return self._record('assert_substring_count', 'pass',
                                    message or f"'{substring}' appears {count} times",
                                    substring=substring, expected_count=count,
                                    actual_count=actual_count)
            self.error(f"✗ Substring appears {actual_count} time(s), expected {count}")
            return self._record('assert_substring_count', 'fail',
                                message or f"'{substring}' appears {count} times",
                                substring=substring, expected_count=count,
                                actual_count=actual_count)
        except Exception as e:
            self.error(f"Assertion failed with exception: {e}")
            return self._record('assert_substring_count', 'error',
                                message or f"Assertion failed: {e}",
                                substring=substring, expected_count=count)

    # ------------------------------------------------------------------
    # bookkeeping
    # ------------------------------------------------------------------

    def get_assertion_count(self) -> int:
        """Get total number of assertions made."""
        return len(self.assertions)

    def get_failure_count(self) -> int:
        """Get number of failed assertions."""
        return sum(1 for a in self.assertions if a.get('result') == 'fail')

    def get_success_count(self) -> int:
        """Get number of passed assertions."""
        return sum(1 for a in self.assertions if a.get('result') == 'pass')

    def get_logs(self) -> List[str]:
        """Get a copy of all log messages."""
        return self.logs.copy()

    def get_errors(self) -> List[Dict[str, Any]]:
        """Get a copy of all error records."""
        return self.errors.copy()

    def clear(self) -> None:
        """Clear all logs, errors, and assertions."""
        self.assertions.clear()
        self.errors.clear()
        self.logs.clear()

    def __enter__(self):
        """Context manager entry."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit; never suppresses exceptions."""
        return False


# Convenience function for context manager usage
def test_logger():
    """Create and return a TestLogger instance."""
    return TestLogger()
|
||||
@@ -1 +0,0 @@
|
||||
0.0.1
|
||||
@@ -1,3 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "Hello world"
|
||||
23
test-project/test/TestApp/.gitignore
vendored
Normal file
23
test-project/test/TestApp/.gitignore
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
.Python
|
||||
*.env
|
||||
.venv/
|
||||
node_modules/
|
||||
.env
|
||||
build/
|
||||
dist/
|
||||
.pytest_cache/
|
||||
.mypy_cache/
|
||||
.coverage
|
||||
htmlcov/
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.DS_Store
|
||||
.git
|
||||
11
test-project/test/TestApp/README.md
Normal file
11
test-project/test/TestApp/README.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# TestApp
|
||||
|
||||
A test application
|
||||
|
||||
## Features
|
||||
- feature1
|
||||
- feature2
|
||||
|
||||
## Tech Stack
|
||||
- python
|
||||
- fastapi
|
||||
2
test-project/test/TestApp/main.py
Normal file
2
test-project/test/TestApp/main.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# Generated by AI Software Factory
|
||||
print('Hello, World!')
|
||||
23
test-project/test/test-project/.gitignore
vendored
Normal file
23
test-project/test/test-project/.gitignore
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
.Python
|
||||
*.env
|
||||
.venv/
|
||||
node_modules/
|
||||
.env
|
||||
build/
|
||||
dist/
|
||||
.pytest_cache/
|
||||
.mypy_cache/
|
||||
.coverage
|
||||
htmlcov/
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.DS_Store
|
||||
.git
|
||||
11
test-project/test/test-project/README.md
Normal file
11
test-project/test/test-project/README.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# test-project
|
||||
|
||||
Test project description
|
||||
|
||||
## Features
|
||||
- feature-1
|
||||
- feature-2
|
||||
|
||||
## Tech Stack
|
||||
- python
|
||||
- fastapi
|
||||
2
test-project/test/test-project/main.py
Normal file
2
test-project/test/test-project/main.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# Generated by AI Software Factory
|
||||
print('Hello, World!')
|
||||
23
test-project/test/test/.gitignore
vendored
Normal file
23
test-project/test/test/.gitignore
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
.Python
|
||||
*.env
|
||||
.venv/
|
||||
node_modules/
|
||||
.env
|
||||
build/
|
||||
dist/
|
||||
.pytest_cache/
|
||||
.mypy_cache/
|
||||
.coverage
|
||||
htmlcov/
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.DS_Store
|
||||
.git
|
||||
7
test-project/test/test/README.md
Normal file
7
test-project/test/test/README.md
Normal file
@@ -0,0 +1,7 @@
|
||||
# Test
|
||||
|
||||
Test
|
||||
|
||||
## Features
|
||||
|
||||
## Tech Stack
|
||||
2
test-project/test/test/main.py
Normal file
2
test-project/test/test/main.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# Generated by AI Software Factory
|
||||
print('Hello, World!')
|
||||
Reference in New Issue
Block a user