pre Oauth2

Commit a375a2f76d (parent 956d865500)

Dockerfile — 24 changed lines
@@ -1,11 +1,25 @@
-FROM python:3.10-slim
+FROM python:3.11-slim
 
 WORKDIR /app
 
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    postgresql-client \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy requirements and install
 COPY requirements.txt .
-RUN pip install -r requirements.txt --no-cache-dir
+RUN pip install --no-cache-dir -r requirements.txt
 
-# Copy the application code into the container
-COPY . .
+# Copy application
+COPY app.py .
+COPY init_db.py .
 
-CMD ["chainlit", "run", "app.py", "--host", "0.0.0.0", "--port", "8000"]
+# Create required directories
+RUN mkdir -p /app/workspaces /app/public /app/.files
+
+EXPOSE 8000
+
+# Startup script with DB initialization
+CMD python init_db.py && chainlit run app.py --host 0.0.0.0 --port 8000
app.py — 324 changed lines
@@ -1,61 +1,59 @@
 import os
-import chainlit as cl
 import re
-from datetime import datetime
-import shutil
 import uuid
+import shutil
+from datetime import datetime
+from typing import Optional
+
+import chainlit as cl
 import ollama
 from qdrant_client import AsyncQdrantClient
 from qdrant_client.models import PointStruct, Distance, VectorParams
 from chainlit.data.sql_alchemy import SQLAlchemyDataLayer
 
-# --- DATABASE CONFIGURATION (PostgreSQL) ---
-# Make sure user/password match your docker-compose.yml
-# Syntax: postgresql+asyncpg://user:password@host:port/dbname
-DATABASE_URL = "postgresql+asyncpg://user:password@postgres:5432/ai_station"
-
-# Enable persistence to the DB
-cl.data_layer = SQLAlchemyDataLayer(conn_string=DATABASE_URL)
-# -
-
-# --- HARD-CODED CONFIGURATION ---
-OLLAMA_URL = "http://192.168.1.243:11434"
-# ---------------------------------
-
-USER_ROLES = {
-    'moglie@esempio.com': 'business',
-    'ingegnere@esempio.com': 'engineering',
-    'architetto@esempio.com': 'architecture',
-    'admin@esempio.com': 'admin'
-}
+# === CONFIGURATION ===
+DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station")
+OLLAMA_URL = os.getenv("OLLAMA_URL", "http://192.168.1.243:11434")
+QDRANT_URL = os.getenv("QDRANT_URL", "http://qdrant:6333")
+
+# === DATA LAYER INITIALIZATION ===
+# IMPORTANT: must be initialized BEFORE the handlers are defined
+try:
+    data_layer = SQLAlchemyDataLayer(conninfo=DATABASE_URL)
+    cl.data_layer = data_layer
+    print("✅ SQLAlchemyDataLayer initialized successfully")
+except Exception as e:
+    print(f"❌ Failed to initialize data layer: {e}")
+    cl.data_layer = None
 
 WORKSPACES_DIR = "./workspaces"
+USER_ROLE = "admin"
 
-def create_workspace(user_role):
+# === UTILITY FUNCTIONS ===
+def create_workspace(user_role: str):
+    """Create the workspace directory if it does not exist"""
     workspace_path = os.path.join(WORKSPACES_DIR, user_role)
-    if not os.path.exists(workspace_path):
-        os.makedirs(workspace_path)
+    os.makedirs(workspace_path, exist_ok=True)
     return workspace_path
 
-def save_code_to_file(code, user_role):
-    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
+def save_code_to_file(code: str, user_role: str) -> str:
+    """Save a code block as a .py file"""
+    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
     file_name = f"code_{timestamp}.py"
     file_path = os.path.join(WORKSPACES_DIR, user_role, file_name)
 
-    with open(file_path, "w") as file:
-        file.write(code)
+    with open(file_path, "w", encoding="utf-8") as f:
+        f.write(code)
 
     return file_path
 
-def limit_history(history):
-    if len(history) > 20:
-        history = history[-20:]
-    return history
-
-async def connect_to_qdrant():
-    client = AsyncQdrantClient(url="http://qdrant:6333")
+# === QDRANT FUNCTIONS ===
+async def get_qdrant_client() -> AsyncQdrantClient:
+    """Connect to Qdrant"""
+    client = AsyncQdrantClient(url=QDRANT_URL)
     collection_name = "documents"
 
-    # Check if collection exists
+    # Create the collection if it does not exist
     if not await client.collection_exists(collection_name):
         await client.create_collection(
             collection_name=collection_name,
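Aside: the rewritten helpers above are plain functions, so they can be smoke-tested outside Chainlit. A minimal, hypothetical sketch (not part of the commit; it assumes the packages from requirements.txt are installed, and relies on the try/except above so that an unreachable Postgres only prints a warning when app.py is imported):

    # smoke_helpers.py — hypothetical local check of the workspace helpers
    from app import create_workspace, save_code_to_file

    # Creates ./workspaces/admin if missing (os.makedirs with exist_ok=True)
    path = create_workspace("admin")
    print("workspace:", path)

    # Writes code_<timestamp>.py inside that workspace and returns its path
    saved = save_code_to_file("print('hello')\n", "admin")
    print("saved:", saved)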
@@ -64,147 +62,203 @@ async def connect_to_qdrant():
 
     return client
 
-async def get_embeddings(text):
+async def get_embeddings(text: str) -> list:
+    """Generate embeddings with Ollama"""
     client = ollama.Client(host=OLLAMA_URL)
 
-    # Safety limit to avoid 500 errors from Ollama
-    limit = 2000
-    if len(text) > limit:
-        text = text[:limit]
+    # Limit length to avoid errors
+    max_length = 2000
+    if len(text) > max_length:
+        text = text[:max_length]
 
     try:
         response = client.embed(model='nomic-embed-text', input=text)
 
         if 'embeddings' in response:
             return response['embeddings'][0]
-        return response.get('embedding')
+        return response.get('embedding', [])
 
     except Exception as e:
-        print(f"Errore Embedding: {e}")
+        print(f"❌ Errore Embedding: {e}")
         return []
 
-async def search_qdrant(query_text, user_role):
-    """Search Qdrant for relevant documents"""
-    try:
-        qdrant_client = await connect_to_qdrant()
-        query_embedding = await get_embeddings(query_text)
-
-        if not query_embedding:
-            return ""
-
-        # Use query_points (the new API for the AsyncClient)
-        search_result = await qdrant_client.query_points(
-            collection_name="documents",
-            query=query_embedding,
-            limit=3
-        )
-
-        hits = search_result.points
-        contexts = []
-        if hits:
-            for hit in hits:
-                try:
-                    if hit.payload:
-                        # --- IMPORTANT FIX: retrieve the real content ---
-                        nome_file = hit.payload.get('file_name', 'Sconosciuto')
-                        contenuto = hit.payload.get('content', '')
-                        contexts.append(f"--- Documento: {nome_file} ---\n{contenuto}\n----------------")
-                except Exception as e:
-                    print(f"Error parsing hit: {e}")
-
-        return "\n".join(contexts)
-    except Exception as e:
-        print(f"Errore ricerca Qdrant: {e}")
-        return ""
-
-@cl.on_chat_start
-async def chat_start():
-    user_email = "admin@esempio.com"
-    user_role = USER_ROLES.get(user_email, 'guest')
-
-    create_workspace(user_role)
-
-    cl.user_session.set("history", [])
-    cl.user_session.set("role", user_role)
-
-    welcome_msg = f"Welcome, {user_role.capitalize()}!"
-    await cl.Message(content=welcome_msg).send()
-
-@cl.on_message
-async def message(message):
-    user_role = cl.user_session.get("role", 'guest')
-    if not user_role:
-        await cl.Message(content="User role not found").send()
-        return
-
-    try:
-        client = ollama.Client(host=OLLAMA_URL)
-        history = cl.user_session.get("history", [])
-        history = limit_history(history)
-
-        # --- STEP 1: Handle upload and indexing (BEFORE the search) ---
-        if message.elements:
-            uploaded_files = []
-            for element in message.elements:
-                try:
-                    dest_path = os.path.join(WORKSPACES_DIR, user_role, element.name)
-                    with open(element.path, 'rb') as src, open(dest_path, 'wb') as dst:
-                        shutil.copyfileobj(src, dst)
-
-                    if element.name.endswith('.txt'):
-                        # utf-8 encoding for safety
-                        with open(dest_path, 'r', encoding='utf-8') as f:
-                            content = f.read()
-
-                        embeddings = await get_embeddings(content)
-                        if embeddings:
-                            qdrant_client = await connect_to_qdrant()
-                            point_id = str(uuid.uuid4())
-
-                            # --- IMPORTANT FIX: store the content in the payload as well ---
-                            point = PointStruct(
-                                id=point_id,
-                                vector=embeddings,
-                                payload={
-                                    "file_name": element.name,
-                                    "content": content
-                                }
-                            )
-                            await qdrant_client.upsert(collection_name="documents", points=[point])
-                            await cl.Message(content=f"Documento '{element.name}' indicizzato.").send()
-
-                    uploaded_files.append(element.name)
-                except Exception as e:
-                    await cl.Message(content=f"Error saving {element.name}: {e}").send()
-
-            if uploaded_files:
-                await cl.Message(content=f"Files saved: {', '.join(uploaded_files)}").send()
-
-        # --- STEP 2: Search the documents ---
-        context_text = await search_qdrant(message.content, user_role)
-
-        if context_text:
-            system_prompt = f"Usa esclusivamente il seguente contesto per rispondere alla domanda. Se la risposta non è nel contesto, dillo.\n\nContesto:\n{context_text}"
-            history.insert(0, {"role": "system", "content": system_prompt})
-
-        history.append({"role": "user", "content": message.content})
-
-        # --- STEP 3: Chat generation ---
-        response = client.chat(model='qwen2.5-coder:7b', messages=history)
-
-        # Code extraction
-        code_blocks = re.findall(r"```python(.*?)```", response['message']['content'], re.DOTALL)
-
-        elements = []
-        if code_blocks:
-            for code in code_blocks:
-                file_path = save_code_to_file(code, user_role)
-                elements.append(cl.File(name=os.path.basename(file_path), path=file_path))
-
-        history.append({"role": "assistant", "content": response['message']['content']})
-        cl.user_session.set("history", history)
-
-        await cl.Message(content=response['message']['content'], elements=elements).send()
-
-    except Exception as e:
-        await cl.Message(content=f"Error: {e}").send()
+async def index_document(file_name: str, content: str) -> bool:
+    """Index a document in Qdrant"""
+    try:
+        embeddings = await get_embeddings(content)
+        if not embeddings:
+            return False
+
+        qdrant_client = await get_qdrant_client()
+        point_id = str(uuid.uuid4())
+
+        point = PointStruct(
+            id=point_id,
+            vector=embeddings,
+            payload={
+                "file_name": file_name,
+                "content": content,
+                "indexed_at": datetime.now().isoformat()
+            }
+        )
+
+        await qdrant_client.upsert(collection_name="documents", points=[point])
+        return True
+
+    except Exception as e:
+        print(f"❌ Errore indicizzazione: {e}")
+        return False
+
+async def search_qdrant(query_text: str, limit: int = 3) -> str:
+    """Search for relevant documents"""
+    try:
+        qdrant_client = await get_qdrant_client()
+        query_embedding = await get_embeddings(query_text)
+
+        if not query_embedding:
+            return ""
+
+        search_result = await qdrant_client.query_points(
+            collection_name="documents",
+            query=query_embedding,
+            limit=limit
+        )
+
+        contexts = []
+        for hit in search_result.points:
+            if hit.payload:
+                file_name = hit.payload.get('file_name', 'Unknown')
+                content = hit.payload.get('content', '')
+                score = hit.score if hasattr(hit, 'score') else 0
+
+                contexts.append(
+                    f"📄 **{file_name}** (relevance: {score:.2f})\n"
+                    f"```{content}```"
+                )
+
+        return "\n\n".join(contexts) if contexts else ""
+
+    except Exception as e:
+        print(f"❌ Errore ricerca Qdrant: {e}")
+        return ""
+
+# === CHAINLIT HANDLERS ===
 @cl.on_chat_start
+async def on_chat_start():
+    """Chat initialization"""
+    create_workspace(USER_ROLE)
+
+    # Set session variables
+    cl.user_session.set("role", USER_ROLE)
+
+    # Check persistence
+    persistence_status = "✅ Attiva" if cl.data_layer else "⚠️ Disattivata"
+
+    await cl.Message(
+        content=f"🚀 **AI Station Ready** - Workspace: `{USER_ROLE}`\n\n"
+        f"📤 Upload `.txt` files per indicizzarli nel RAG\n"
+        f"💾 Persistenza conversazioni: {persistence_status}\n"
+        f"🤖 Modello: `qwen2.5-coder:7b` @ {OLLAMA_URL}"
+    ).send()
+
 @cl.on_message
+async def on_message(message: cl.Message):
+    """Handle user messages"""
+    user_role = cl.user_session.get("role", "guest")
+
+    try:
+        # === STEP 1: Handle uploads ===
+        if message.elements:
+            await handle_file_uploads(message.elements, user_role)
+
+        # === STEP 2: RAG search ===
+        context_text = await search_qdrant(message.content, limit=3)
+
+        # === STEP 3: Prompt preparation ===
+        messages = []
+
+        if context_text:
+            system_prompt = (
+                "Sei un assistente AI esperto. Usa ESCLUSIVAMENTE il seguente contesto "
+                "per rispondere. Se la risposta non è nel contesto, dillo chiaramente.\n\n"
+                f"**CONTESTO:**\n{context_text}"
+            )
+            messages.append({"role": "system", "content": system_prompt})
+
+        messages.append({"role": "user", "content": message.content})
+
+        # === STEP 4: Ollama call with streaming ===
+        client = ollama.Client(host=OLLAMA_URL)
+
+        msg = cl.Message(content="")
+        await msg.send()
+
+        stream = client.chat(
+            model='qwen2.5-coder:7b',
+            messages=messages,
+            stream=True
+        )
+
+        full_response = ""
+        for chunk in stream:
+            content = chunk['message']['content']
+            full_response += content
+            await msg.stream_token(content)
+
+        await msg.update()
+
+        # === STEP 5: Extract and save code ===
+        code_blocks = re.findall(r"```python(.*?)```", full_response, re.DOTALL)
+
+        if code_blocks:
+            elements = []
+            for code in code_blocks:
+                file_path = save_code_to_file(code.strip(), user_role)
+                elements.append(
+                    cl.File(
+                        name=os.path.basename(file_path),
+                        path=file_path,
+                        display="inline"
+                    )
+                )
+
+            await cl.Message(
+                content=f"💾 Codice salvato in `{user_role}/`",
+                elements=elements
+            ).send()
+
+    except Exception as e:
+        await cl.Message(content=f"❌ **Errore:** {str(e)}").send()
+
+async def handle_file_uploads(elements, user_role: str):
+    """Handle upload and indexing of files"""
+    for element in elements:
+        try:
+            # Save the file
+            dest_path = os.path.join(WORKSPACES_DIR, user_role, element.name)
+            shutil.copy(element.path, dest_path)
+
+            # Index only .txt files
+            if element.name.endswith('.txt'):
+                with open(dest_path, 'r', encoding='utf-8') as f:
+                    content = f.read()
+
+                success = await index_document(element.name, content)
+
+                if success:
+                    await cl.Message(
+                        content=f"✅ **{element.name}** indicizzato in Qdrant"
+                    ).send()
+                else:
+                    await cl.Message(
+                        content=f"⚠️ Errore indicizzazione {element.name}"
+                    ).send()
+            else:
+                await cl.Message(
+                    content=f"📁 **{element.name}** salvato (solo .txt vengono indicizzati)"
+                ).send()
+
+        except Exception as e:
+            await cl.Message(
+                content=f"❌ Errore con {element.name}: {str(e)}"
+            ).send()
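Aside: the new index_document / search_qdrant pair can be exercised end to end without the Chainlit UI. A rough, hypothetical sketch (not part of the commit; it assumes the compose stack is up with the ports published below, that the nomic-embed-text model is available on the Ollama host, and it tolerates the app's data layer failing to reach Postgres from outside the compose network):

    # rag_check.py — hypothetical round-trip check of the indexing/search helpers
    import asyncio
    import os

    # Point the app at the ports published by the compose file (adjust hosts as needed)
    os.environ.setdefault("QDRANT_URL", "http://localhost:6333")
    os.environ.setdefault("OLLAMA_URL", "http://192.168.1.243:11434")

    from app import index_document, search_qdrant  # noqa: E402

    async def main():
        # Embed and upsert a tiny document into the "documents" collection
        ok = await index_document("demo.txt", "AI Station usa Chainlit, Qdrant e Ollama.")
        print("indexed:", ok)

        # Query it back; search_qdrant returns a formatted context string ("" on failure)
        print(await search_qdrant("Quali componenti usa AI Station?", limit=3))

    asyncio.run(main())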
docker-compose.yml
@@ -1,44 +1,68 @@
-services:
-  chainlit-app:
-    build: .
-    ports:
-      - "8000:8000"
-    environment:
-      - CHAINLIT_AUTH_HEADER=X-Email
-      - OLLAMA_API_BASE=http://192.168.1.243:11434
-    volumes:
-      - ./:/app
-    depends_on:
-      - qdrant
-      - postgres
-    networks:
-      - ai-station-net
-
-  qdrant:
-    image: qdrant/qdrant:latest
-    ports:
-      - "6333:6333"
-    volumes:
-      - qdrant-data:/qdrant/db
-    networks:
-      - ai-station-net
-
-  postgres:
-    image: postgres:16
-    environment:
-      POSTGRES_DB: ai_station
-      POSTGRES_USER: user
-      POSTGRES_PASSWORD: password
-    ports:
-      - "5432:5432"
-    volumes:
-      - postgres-data:/var/lib/postgresql/data
-    networks:
-      - ai-station-net
-
-volumes:
-  qdrant-data:
-  postgres-data:
-
-networks:
-  ai-station-net:
+services:
+  postgres:
+    image: postgres:15-alpine
+    container_name: ai-station-postgres
+    environment:
+      POSTGRES_DB: ai_station
+      POSTGRES_USER: ai_user
+      POSTGRES_PASSWORD: secure_password_here
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+    ports:
+      - "5432:5432"
+    networks:
+      - ai-station-net
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U ai_user -d ai_station"]  # <- add -d ai_station
+      interval: 10s
+      timeout: 5s
+      retries: 5
+    restart: unless-stopped
+
+  qdrant:
+    image: qdrant/qdrant:latest
+    container_name: ai-station-qdrant
+    volumes:
+      - qdrant_data:/qdrant/storage
+    ports:
+      - "6333:6333"
+      - "6334:6334"
+    networks:
+      - ai-station-net
+    restart: unless-stopped
+
+  chainlit-app:
+    build: .
+    container_name: ai-station-app
+    ports:
+      - "8000:8000"
+    environment:
+      - DATABASE_URL=postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station
+      - OLLAMA_URL=http://192.168.1.243:11434
+      - QDRANT_URL=http://qdrant:6333
+      - CHAINLIT_AUTH_SECRET=your-secret-key-here
+    volumes:
+      - ./workspaces:/app/workspaces
+      - ./public:/app/public
+    networks:
+      - ai-station-net
+    depends_on:
+      postgres:
+        condition: service_healthy
+      qdrant:
+        condition: service_started
+    command: chainlit run app.py --host 0.0.0.0 --port 8000
+    restart: unless-stopped
+
+volumes:
+  postgres_data:
+    driver: local
+  qdrant_data:
+    driver: local
+
+networks:
+  ai-station-net:
+    driver: bridge
+    ipam:
+      config:
+        - subnet: 172.28.0.0/16
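Aside: once the stack is up, the published host ports can be checked from outside the containers. A hypothetical sketch (not part of the commit; assumes the host ports and credentials from this compose file, and the asyncpg / qdrant-client packages from requirements.txt):

    # stack_check.py — hypothetical connectivity check for the compose stack
    import asyncio

    import asyncpg
    from qdrant_client import AsyncQdrantClient

    async def main():
        # Postgres: same credentials as the postgres service above, via the published port
        conn = await asyncpg.connect(
            "postgresql://ai_user:secure_password_here@localhost:5432/ai_station"
        )
        print("postgres:", await conn.fetchval("SELECT version()"))
        await conn.close()

        # Qdrant: list collections through the published REST port
        qdrant = AsyncQdrantClient(url="http://localhost:6333")
        print("qdrant collections:", await qdrant.get_collections())

    asyncio.run(main())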
New file — 100 lines of captured container log output (filename not shown in this view)
@@ -0,0 +1,100 @@
ai-station-qdrant | _ _
ai-station-postgres |
ai-station-postgres | PostgreSQL Database directory appears to contain a database; Skipping initialization
ai-station-postgres |
ai-station-app | 2025-12-26 16:21:52 - Created default config file at /app/.chainlit/config.toml
ai-station-postgres | 2025-12-26 16:21:40.238 UTC [1] LOG: starting PostgreSQL 15.15 on x86_64-pc-linux-musl, compiled by gcc (Alpine 15.2.0) 15.2.0, 64-bit
ai-station-app | 2025-12-26 16:21:52 - Created default translation directory at /app/.chainlit/translations
ai-station-postgres | 2025-12-26 16:21:40.238 UTC [1] LOG: listening on IPv4 address "0.0.0.0", port 5432
ai-station-qdrant | __ _ __| |_ __ __ _ _ __ | |_
ai-station-postgres | 2025-12-26 16:21:40.239 UTC [1] LOG: listening on IPv6 address "::", port 5432
ai-station-qdrant | / _` |/ _` | '__/ _` | '_ \| __|
ai-station-postgres | 2025-12-26 16:21:40.259 UTC [1] LOG: listening on Unix socket "/var/run/postgresql/.s.PGSQL.5432"
ai-station-postgres | 2025-12-26 16:21:40.285 UTC [29] LOG: database system was shut down at 2025-12-26 16:16:26 UTC
ai-station-postgres | 2025-12-26 16:21:40.313 UTC [1] LOG: database system is ready to accept connections
ai-station-postgres | 2025-12-26 16:26:40.323 UTC [27] LOG: checkpoint starting: time
ai-station-qdrant | | (_| | (_| | | | (_| | | | | |_
ai-station-qdrant | \__, |\__,_|_| \__,_|_| |_|\__|
ai-station-qdrant | |_|
ai-station-postgres | 2025-12-26 16:26:40.397 UTC [27] LOG: checkpoint complete: wrote 3 buffers (0.0%); 0 WAL file(s) added, 0 removed, 0 recycled; write=0.020 s, sync=0.010 s, total=0.075 s; sync files=2, longest=0.006 s, average=0.005 s; distance=0 kB, estimate=0 kB
ai-station-qdrant |
ai-station-qdrant | Version: 1.16.3, build: bd49f45a
ai-station-qdrant | Access web UI at http://localhost:6333/dashboard
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/hi.json
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/en-US.json
ai-station-qdrant |
ai-station-qdrant | 2025-12-26T16:21:39.943731Z INFO storage::content_manager::consensus::persistent: Loading raft state from ./storage/raft_state.json
ai-station-qdrant | 2025-12-26T16:21:39.964037Z INFO storage::content_manager::toc: Loading collection: documents
ai-station-qdrant | 2025-12-26T16:21:40.041342Z INFO collection::shards::local_shard: Recovering shard ./storage/collections/documents/0: 0/1 (0%)
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/bn.json
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/ta.json
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/te.json
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/gu.json
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/zh-CN.json
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/ml.json
ai-station-qdrant | 2025-12-26T16:21:40.115966Z INFO collection::shards::local_shard: Recovered collection documents: 1/1 (100%)
ai-station-qdrant | 2025-12-26T16:21:40.117838Z INFO qdrant: Distributed mode disabled
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/kn.json
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/he-IL.json
ai-station-app | 2025-12-26 16:21:52 - Created default translation file at /app/.chainlit/translations/mr.json
ai-station-app | 2025-12-26 16:22:00 - SQLAlchemyDataLayer storage client is not initialized and elements will not be persisted!
ai-station-app | ✅ SQLAlchemyDataLayer initialized successfully
ai-station-app | 2025-12-26 16:22:00 - Created default chainlit markdown file at /app/chainlit.md
ai-station-app | 2025-12-26 16:22:00 - Your app is available at http://0.0.0.0:8000
ai-station-app | 2025-12-26 16:32:21 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:32:28 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:32:29 - Translated markdown file for it-IT not found. Defaulting to chainlit.md.
ai-station-app | 2025-12-26 16:32:46 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:32:49 - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:32:49 - HTTP Request: GET http://qdrant:6333/collections/documents/exists "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:32:50 - HTTP Request: POST http://192.168.1.243:11434/api/embed "HTTP/1.1 200 OK"
ai-station-qdrant | 2025-12-26T16:21:40.117951Z INFO qdrant: Telemetry reporting enabled, id: 0b179897-a2e7-448d-97f2-1fe4e1915faa
ai-station-qdrant | 2025-12-26T16:21:40.191722Z INFO qdrant::actix: TLS disabled for REST API
ai-station-qdrant | 2025-12-26T16:21:40.192047Z INFO qdrant::actix: Qdrant HTTP listening on 6333
ai-station-qdrant | 2025-12-26T16:21:40.192122Z INFO actix_server::builder: starting 15 workers
ai-station-qdrant | 2025-12-26T16:21:40.192211Z INFO actix_server::server: Actix runtime found; starting in Actix runtime
ai-station-app | 2025-12-26 16:32:50 - HTTP Request: POST http://qdrant:6333/collections/documents/points/query "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:32:52 - HTTP Request: POST http://192.168.1.243:11434/api/chat "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:33:04 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:33:09 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:33:18 - HTTP Request: POST http://192.168.1.243:11434/api/embed "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:33:19 - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
ai-station-qdrant | 2025-12-26T16:21:40.192262Z INFO actix_server::server: starting service: "actix-web-service-0.0.0.0:6333", workers: 15, listening on: 0.0.0.0:6333
ai-station-qdrant | 2025-12-26T16:21:40.212492Z INFO qdrant::tonic: Qdrant gRPC listening on 6334
ai-station-app | 2025-12-26 16:33:19 - HTTP Request: GET http://qdrant:6333/collections/documents/exists "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:33:19 - HTTP Request: PUT http://qdrant:6333/collections/documents/points?wait=true "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:33:19 - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:33:19 - HTTP Request: GET http://qdrant:6333/collections/documents/exists "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:33:19 - HTTP Request: POST http://192.168.1.243:11434/api/embed "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:33:19 - HTTP Request: POST http://qdrant:6333/collections/documents/points/query "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:33:19 - HTTP Request: POST http://192.168.1.243:11434/api/chat "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:34:13 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:34:16 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:34:16 - Translated markdown file for it-IT not found. Defaulting to chainlit.md.
ai-station-app | 2025-12-26 16:34:30 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:34:42 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:34:43 - Translated markdown file for it-IT not found. Defaulting to chainlit.md.
ai-station-app | 2025-12-26 16:35:04 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: POST http://192.168.1.243:11434/api/embed "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: GET http://qdrant:6333/collections/documents/exists "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: PUT http://qdrant:6333/collections/documents/points?wait=true "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: GET http://qdrant:6333/collections/documents/exists "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: POST http://192.168.1.243:11434/api/embed "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: POST http://qdrant:6333/collections/documents/points/query "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:13 - HTTP Request: POST http://192.168.1.243:11434/api/chat "HTTP/1.1 200 OK"
ai-station-app | 2025-12-26 16:35:24 - Translation file for it-IT not found. Using default translation en-US.
ai-station-app | 2025-12-26 16:35:24 - Translated markdown file for it-IT not found. Defaulting to chainlit.md.
ai-station-app | 2025-12-26 16:35:37 - Translation file for it-IT not found. Using default translation en-US.
ai-station-qdrant | 2025-12-26T16:21:40.212579Z INFO qdrant::tonic: TLS disabled for gRPC API
ai-station-qdrant | 2025-12-26T16:32:49.529256Z INFO actix_web::middleware::logger: 172.28.0.4 "GET /collections/documents/exists HTTP/1.1" 200 81 "-" "python-client/1.16.2 python/3.11.14" 0.001330
ai-station-qdrant | 2025-12-26T16:32:50.646822Z INFO actix_web::middleware::logger: 172.28.0.4 "POST /collections/documents/points/query HTTP/1.1" 200 4905 "-" "python-client/1.16.2 python/3.11.14" 0.014367
ai-station-qdrant | 2025-12-26T16:33:19.146916Z INFO actix_web::middleware::logger: 172.28.0.4 "GET /collections/documents/exists HTTP/1.1" 200 82 "-" "python-client/1.16.2 python/3.11.14" 0.000969
ai-station-qdrant | 2025-12-26T16:33:19.179182Z INFO actix_web::middleware::logger: 172.28.0.4 "PUT /collections/documents/points?wait=true HTTP/1.1" 200 91 "-" "python-client/1.16.2 python/3.11.14" 0.021875
ai-station-qdrant | 2025-12-26T16:33:19.309152Z INFO actix_web::middleware::logger: 172.28.0.4 "GET /collections/documents/exists HTTP/1.1" 200 82 "-" "python-client/1.16.2 python/3.11.14" 0.000935
ai-station-qdrant | 2025-12-26T16:33:19.416423Z INFO actix_web::middleware::logger: 172.28.0.4 "POST /collections/documents/points/query HTTP/1.1" 200 5732 "-" "python-client/1.16.2 python/3.11.14" 0.011009
ai-station-qdrant | 2025-12-26T16:35:13.413627Z INFO actix_web::middleware::logger: 172.28.0.4 "GET /collections/documents/exists HTTP/1.1" 200 82 "-" "python-client/1.16.2 python/3.11.14" 0.000958
ai-station-qdrant | 2025-12-26T16:35:13.438965Z INFO actix_web::middleware::logger: 172.28.0.4 "PUT /collections/documents/points?wait=true HTTP/1.1" 200 91 "-" "python-client/1.16.2 python/3.11.14" 0.015794
ai-station-qdrant | 2025-12-26T16:35:13.576292Z INFO actix_web::middleware::logger: 172.28.0.4 "GET /collections/documents/exists HTTP/1.1" 200 82 "-" "python-client/1.16.2 python/3.11.14" 0.000953
ai-station-qdrant | 2025-12-26T16:35:13.672837Z INFO actix_web::middleware::logger: 172.28.0.4 "POST /collections/documents/points/query HTTP/1.1" 200 5705 "-" "python-client/1.16.2 python/3.11.14" 0.010556
New file — 13 lines, likely a .dockerignore (filename not shown in this view)
@@ -0,0 +1,13 @@
__pycache__
*.pyc
*.pyo
*.pyd
.git
.gitignore
.venv
venv/
workspaces/
*.db
.env
*.log
.DS_Store
New file — init_db.py (27 lines, referenced by the Dockerfile's CMD)
@@ -0,0 +1,27 @@
import asyncio
from sqlalchemy import create_engine, text
from chainlit.data.sql_alchemy import SQLAlchemyDataLayer

DATABASE_URL = "postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station"

async def init_database():
    """Initialize the tables for Chainlit"""
    print("🔧 Inizializzazione database...")

    try:
        # Create the data layer
        data_layer = SQLAlchemyDataLayer(conninfo=DATABASE_URL)

        # Force table creation
        if hasattr(data_layer, '_create_database'):
            await data_layer._create_database()
            print("✅ Database inizializzato con successo")
        else:
            print("⚠️ Metodo _create_database non disponibile")
            print("ℹ️ Le tabelle verranno create automaticamente al primo utilizzo")

    except Exception as e:
        print(f"❌ Errore: {e}")

if __name__ == "__main__":
    asyncio.run(init_database())
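Aside: the hasattr check above means the script may simply fall through to the "tables created on first use" branch, depending on the Chainlit version. A hedged way to verify the outcome is to list the public tables directly (hypothetical helper, not part of the commit; assumes the same credentials and the published 5432 port on localhost):

    # check_tables.py — hypothetical: list tables created by the data layer
    import asyncio

    import asyncpg

    async def main():
        conn = await asyncpg.connect(
            "postgresql://ai_user:secure_password_here@localhost:5432/ai_station"
        )
        rows = await conn.fetch(
            "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"
        )
        print([r["table_name"] for r in rows])
        await conn.close()

    asyncio.run(main())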
requirements.txt
@@ -1,9 +1,11 @@
 chainlit==1.3.2
 pydantic==2.9.2
 ollama
-asyncpg
+asyncpg>=0.29.0
 psycopg2-binary
 qdrant-client>=1.10.0
-sqlalchemy
+sqlalchemy>=2.0.0
+greenlet>=3.0.0
 sniffio
 aiohttp
+alembic