Scaffold full project: Vue 3/Inertia frontend, Docker infra, domain model
- Frontend: Vue 3 + Inertia.js + Pinia + Tailwind CSS with layout and dashboard page - Infrastructure: Docker Compose with nginx, PHP-FPM, PostgreSQL+pgvector, Redis, Python AI service - Database: 22 migrations covering all domain entities (projects, phases, commitments, decisions, documents, handover, audit) - Models: 23 Eloquent models with relationships, casts, and 14 string-backed enums - AI service: FastAPI scaffold with health, chat, summarize, and search endpoints Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
0
ai-service/app/__init__.py
Normal file
0
ai-service/app/__init__.py
Normal file
117
ai-service/app/main.py
Normal file
117
ai-service/app/main.py
Normal file
@@ -0,0 +1,117 @@
|
||||
from fastapi import FastAPI, HTTPException
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
import os
|
||||
|
||||
app = FastAPI(
    title="Innovatieplatform AI Service",
    description="AI service providing chat, summarization and semantic search for the Innovatieplatform.",
    version="0.1.0",
)

# CORS — allow requests from the Laravel app and local development.
# FIX: the LARAVEL_APP_URL fallback ("http://localhost") duplicated an
# entry already present in the list whenever the env var was unset.
# dict.fromkeys dedupes while preserving order, so each origin is
# registered exactly once.
_allowed_origins = list(
    dict.fromkeys(
        [
            "http://laravel-app",
            "http://nginx",
            "http://localhost",
            "http://localhost:80",
            os.getenv("LARAVEL_APP_URL", "http://localhost"),
        ]
    )
)

app.add_middleware(
    CORSMiddleware,
    allow_origins=_allowed_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# --- Request/Response models ---
|
||||
|
||||
class ChatRequest(BaseModel):
    """Request payload for POST /api/chat."""

    # The user's chat message (required).
    message: str
    # Optional project to scope the conversation to.
    project_id: Optional[int] = None
    # Prior turns of the conversation; pydantic deep-copies field defaults
    # per instance, so the shared-mutable-default pitfall does not apply.
    conversation_history: Optional[list] = []
class ChatResponse(BaseModel):
    """Response body for POST /api/chat."""

    # The assistant's reply text.
    reply: str
    # Echo of the project id the chat was scoped to, if any.
    project_id: Optional[int] = None
class SummarizeRequest(BaseModel):
    """Request payload for POST /api/summarize."""

    # Raw text to summarize (required).
    content: str
    # Optional project the content belongs to.
    project_id: Optional[int] = None
    # Kind of summary requested; the placeholder endpoint only echoes
    # this value back — real semantics TBD when the summarizer lands.
    summary_type: Optional[str] = "general"
class SummarizeResponse(BaseModel):
    """Response body for POST /api/summarize."""

    # The generated summary text.
    summary: str
    # Echo of the project id the summary was scoped to, if any.
    project_id: Optional[int] = None
class SearchRequest(BaseModel):
    """Request payload for POST /api/search."""

    # Free-text search query (required).
    query: str
    # Optional project to restrict the search to.
    project_id: Optional[int] = None
    # Maximum number of results to return.
    limit: Optional[int] = 10
class SearchResult(BaseModel):
    """A single semantic-search hit."""

    # Identifier of the matched record.
    id: int
    # Matched text content.
    content: str
    # Similarity score — scale/direction TBD until pgvector search is
    # implemented; confirm when wiring up embeddings.
    score: float
    # Arbitrary extra fields for the hit; pydantic deep-copies this
    # default per instance, so the mutable default is safe here.
    metadata: Optional[dict] = {}
class SearchResponse(BaseModel):
    """Response body for POST /api/search."""

    # Matching results (currently always empty — see the search endpoint).
    results: list[SearchResult]
    # Echo of the submitted query string.
    query: str
# --- Endpoints ---
|
||||
|
||||
@app.get("/health")
async def health_check():
    """Liveness probe consumed by Docker healthchecks and monitoring."""
    return dict(status="ok", service="ai-service")
@app.post("/api/chat", response_model=ChatResponse)
async def chat(request: ChatRequest):
    """
    Handle a chat message, optionally scoped to a project.

    Placeholder — wire up LangGraph + Anthropic in the full implementation.
    """
    # TODO: integrate LangGraph agent with Anthropic Claude
    placeholder_reply = (
        f"[AI placeholder] Received: '{request.message}'. Full AI integration pending."
    )
    return ChatResponse(reply=placeholder_reply, project_id=request.project_id)
@app.post("/api/summarize", response_model=SummarizeResponse)
async def summarize(request: SummarizeRequest):
    """
    Summarize content for a given project.

    Placeholder — wire up Anthropic in the full implementation.
    """
    # TODO: integrate Anthropic Claude summarization
    char_count = len(request.content)
    placeholder_summary = (
        f"[AI placeholder] Summary of {char_count} characters "
        f"(type: {request.summary_type}). Full AI integration pending."
    )
    return SummarizeResponse(
        summary=placeholder_summary,
        project_id=request.project_id,
    )
@app.post("/api/search", response_model=SearchResponse)
async def search(request: SearchRequest):
    """
    Semantic search using pgvector embeddings.

    Placeholder — wire up pgvector + embeddings in the full implementation.
    """
    # TODO: integrate pgvector similarity search with embeddings
    # No retrieval backend yet, so the hit list is always empty.
    empty_hits: list[SearchResult] = []
    return SearchResponse(results=empty_hits, query=request.query)
Reference in New Issue
Block a user