Scaffold full project: Vue 3/Inertia frontend, Docker infra, domain model
- Frontend: Vue 3 + Inertia.js + Pinia + Tailwind CSS with layout and dashboard page
- Infrastructure: Docker Compose with nginx, PHP-FPM, PostgreSQL+pgvector, Redis, Python AI service
- Database: 22 migrations covering all domain entities (projects, phases, commitments, decisions, documents, handover, audit)
- Models: 23 Eloquent models with relationships, casts, and 14 string-backed enums
- AI service: FastAPI scaffold with health, chat, summarize, and search endpoints

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
21
ai-service/Dockerfile
Normal file
21
ai-service/Dockerfile
Normal file
@@ -0,0 +1,21 @@
|
||||
FROM python:3.12-slim

# Don't write .pyc files and flush stdout/stderr immediately so container
# logs appear in real time.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Set working directory
WORKDIR /app

# Install system dependencies needed for psycopg2.
# --no-install-recommends keeps the image slim; apt lists are removed in the
# same layer so they never land in the final image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    libpq-dev \
    gcc \
    && rm -rf /var/lib/apt/lists/*

# Copy and install Python dependencies first so this layer is cached
# independently of application-code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

EXPOSE 8000

# NOTE(review): --reload watches the filesystem and restarts workers — a
# development-only flag. Keep it only if this image is dev-only; otherwise
# drop it here and override the command in docker-compose for local dev.
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
|
||||
0
ai-service/app/__init__.py
Normal file
0
ai-service/app/__init__.py
Normal file
117
ai-service/app/main.py
Normal file
117
ai-service/app/main.py
Normal file
@@ -0,0 +1,117 @@
|
||||
import os
from typing import Optional

from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, Field
|
||||
|
||||
app = FastAPI(
    title="Innovatieplatform AI Service",
    description="AI service providing chat, summarization and semantic search for the Innovatieplatform.",
    version="0.1.0",
)

# CORS — allow requests from the Laravel app and local development.
# The list is de-duplicated while preserving order (dict.fromkeys) because
# the LARAVEL_APP_URL fallback would otherwise repeat "http://localhost".
_allowed_origins = list(
    dict.fromkeys(
        [
            "http://laravel-app",
            "http://nginx",
            "http://localhost",
            "http://localhost:80",
            os.getenv("LARAVEL_APP_URL", "http://localhost"),
        ]
    )
)

app.add_middleware(
    CORSMiddleware,
    allow_origins=_allowed_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
||||
|
||||
|
||||
# --- Request/Response models ---
|
||||
|
||||
class ChatRequest(BaseModel):
    """Incoming chat message, optionally scoped to a project.

    Attributes:
        message: The user's chat message.
        project_id: Project to scope the conversation to, if any.
        conversation_history: Prior conversation turns; defaults to an
            empty list.
    """

    message: str
    project_id: Optional[int] = None
    # default_factory instead of `= []`: the explicit, lint-clean way to
    # declare a mutable default (yields a fresh list per instance).
    conversation_history: Optional[list] = Field(default_factory=list)
|
||||
|
||||
|
||||
class ChatResponse(BaseModel):
    """Reply returned by the chat endpoint."""

    # Assistant reply text.
    reply: str
    # Echoes the project the conversation was scoped to, if any.
    project_id: int | None = None
|
||||
|
||||
|
||||
class SummarizeRequest(BaseModel):
    """Payload for the summarization endpoint."""

    # Raw text to summarize.
    content: str
    # Project the content belongs to, if any.
    project_id: int | None = None
    # Kind of summary requested; defaults to a general-purpose summary.
    summary_type: str | None = "general"
|
||||
|
||||
|
||||
class SummarizeResponse(BaseModel):
    """Summary returned by the summarization endpoint."""

    # Generated summary text.
    summary: str
    # Echoes the project the request was scoped to, if any.
    project_id: int | None = None
|
||||
|
||||
|
||||
class SearchRequest(BaseModel):
    """Payload for the semantic-search endpoint."""

    # Free-text search query.
    query: str
    # Restrict search to a single project, if given.
    project_id: int | None = None
    # Maximum number of results to return.
    limit: int | None = 10
|
||||
|
||||
|
||||
class SearchResult(BaseModel):
    """A single semantic-search hit.

    Attributes:
        id: Identifier of the matched record.
        content: Matched text content.
        score: Similarity score for the match.
        metadata: Arbitrary extra data about the hit; defaults to an
            empty dict.
    """

    id: int
    content: str
    score: float
    # default_factory instead of `= {}`: the explicit, lint-clean way to
    # declare a mutable default (yields a fresh dict per instance).
    metadata: Optional[dict] = Field(default_factory=dict)
|
||||
|
||||
|
||||
class SearchResponse(BaseModel):
    """Result set returned by the semantic-search endpoint."""

    # Matched results, best first.
    results: list[SearchResult]
    # The query that produced these results, echoed back.
    query: str
|
||||
|
||||
|
||||
# --- Endpoints ---
|
||||
|
||||
@app.get("/health")
async def health_check():
    """Health check endpoint used by Docker and monitoring."""
    status_payload = {"status": "ok", "service": "ai-service"}
    return status_payload
|
||||
|
||||
|
||||
@app.post("/api/chat", response_model=ChatResponse)
async def chat(request: ChatRequest):
    """
    Handle a chat message, optionally scoped to a project.
    Placeholder — wire up LangGraph + Anthropic in the full implementation.
    """
    # TODO: integrate LangGraph agent with Anthropic Claude
    # Template-based construction; produces the exact same placeholder text.
    template = "[AI placeholder] Received: '{msg}'. Full AI integration pending."
    return ChatResponse(
        reply=template.format(msg=request.message),
        project_id=request.project_id,
    )
|
||||
|
||||
|
||||
@app.post("/api/summarize", response_model=SummarizeResponse)
async def summarize(request: SummarizeRequest):
    """
    Summarize content for a given project.
    Placeholder — wire up Anthropic in the full implementation.
    """
    # TODO: integrate Anthropic Claude summarization
    char_count = len(request.content)
    placeholder_summary = (
        f"[AI placeholder] Summary of {char_count} characters "
        f"(type: {request.summary_type}). Full AI integration pending."
    )
    return SummarizeResponse(
        summary=placeholder_summary,
        project_id=request.project_id,
    )
|
||||
|
||||
|
||||
@app.post("/api/search", response_model=SearchResponse)
async def search(request: SearchRequest):
    """
    Semantic search using pgvector embeddings.
    Placeholder — wire up pgvector + embeddings in the full implementation.
    """
    # TODO: integrate pgvector similarity search with embeddings
    # No backend yet, so every query yields an empty result set.
    hits: list[SearchResult] = []
    return SearchResponse(results=hits, query=request.query)
|
||||
11
ai-service/requirements.txt
Normal file
11
ai-service/requirements.txt
Normal file
@@ -0,0 +1,11 @@
|
||||
fastapi>=0.110.0
uvicorn>=0.27.0
langgraph>=0.0.40
langchain>=0.1.0
anthropic>=0.30.0
pgvector>=0.2.0
psycopg2-binary>=2.9.0
numpy>=1.26.0
pydantic>=2.0.0
python-dotenv>=1.0.0
httpx>=0.27.0
|
||||
Reference in New Issue
Block a user