Compare commits
4 Commits
main
...
feature/vi
| Author | SHA1 | Date |
|---|---|---|
|
|
40540387c8 | |
|
|
0aa1474744 | |
|
|
b2ff4238af | |
|
|
939a3d11a7 |
|
|
@ -1,68 +1,161 @@
|
|||
[project]
|
||||
# List of environment variables to be provided by each user to use the app.
|
||||
user_env = []
|
||||
|
||||
# Duration (in seconds) during which the session is saved when the connection is lost
|
||||
session_timeout = 3600
|
||||
user_session_timeout = 1296000
|
||||
|
||||
# Duration (in seconds) of the user session expiry
|
||||
user_session_timeout = 1296000 # 15 days
|
||||
|
||||
# Enable third parties caching (e.g., LangChain cache)
|
||||
cache = false
|
||||
|
||||
# Whether to persist user environment variables (API keys) to the database
|
||||
# Set to true to store user env vars in DB, false to exclude them for security
|
||||
persist_user_env = false
|
||||
|
||||
# Whether to mask user environment variables (API keys) in the UI with password type
|
||||
# Set to true to show API keys as ***, false to show them as plain text
|
||||
mask_user_env = false
|
||||
|
||||
# Authorized origins
|
||||
allow_origins = ["*"]
|
||||
|
||||
[features]
|
||||
# Process and display HTML in messages. This can be a security risk (see https://stackoverflow.com/questions/19603097/why-is-it-dangerous-to-render-user-generated-html-or-javascript)
|
||||
unsafe_allow_html = true
|
||||
|
||||
# Process and display mathematical expressions. This can clash with "$" characters in messages.
|
||||
latex = false
|
||||
|
||||
# Autoscroll new user messages at the top of the window
|
||||
user_message_autoscroll = true
|
||||
|
||||
# Automatically tag threads with the current chat profile (if a chat profile is used)
|
||||
auto_tag_thread = true
|
||||
|
||||
# Allow users to edit their own messages
|
||||
edit_message = true
|
||||
|
||||
# Allow users to share threads (backend + UI). Requires an app-defined on_shared_thread_view callback.
|
||||
allow_thread_sharing = false
|
||||
|
||||
[features.slack]
|
||||
# Add emoji reaction when message is received (requires reactions:write OAuth scope)
|
||||
reaction_on_message_received = false
|
||||
|
||||
# Authorize users to spontaneously upload files with messages
|
||||
[features.spontaneous_file_upload]
|
||||
enabled = true
|
||||
accept = ["*"]
|
||||
max_files = 20
|
||||
max_size_mb = 500
|
||||
enabled = true
|
||||
# Define accepted file types using MIME types
|
||||
# Examples:
|
||||
# 1. For specific file types:
|
||||
# accept = ["image/jpeg", "image/png", "application/pdf"]
|
||||
# 2. For all files of certain type:
|
||||
# accept = ["image/*", "audio/*", "video/*"]
|
||||
# 3. For specific file extensions:
|
||||
# accept = { "application/octet-stream" = [".xyz", ".pdb"] }
|
||||
# Note: Using "*/*" is not recommended as it may cause browser warnings
|
||||
accept = ["*"]
|
||||
max_files = 20
|
||||
max_size_mb = 500
|
||||
|
||||
[features.audio]
|
||||
enabled = false
|
||||
sample_rate = 24000
|
||||
# Enable audio features
|
||||
enabled = false
|
||||
# Sample rate of the audio
|
||||
sample_rate = 24000
|
||||
|
||||
[features.mcp]
|
||||
enabled = false
|
||||
# Enable Model Context Protocol (MCP) features
|
||||
enabled = false
|
||||
|
||||
[features.mcp.sse]
|
||||
enabled = true
|
||||
|
||||
[features.mcp.streamable-http]
|
||||
enabled = true
|
||||
|
||||
[features.mcp.stdio]
|
||||
enabled = true
|
||||
# Only the executables in the allow list can be used for MCP stdio server.
|
||||
# Only need the base name of the executable, e.g. "npx", not "/usr/bin/npx".
|
||||
# Please don't comment this line for now, we need it to parse the executable name.
|
||||
allowed_executables = [ "npx", "uvx" ]
|
||||
|
||||
[UI]
|
||||
# Name of the assistant.
|
||||
name = "Ai Station DFFM"
|
||||
|
||||
default_theme = "dark"
|
||||
|
||||
layout = "wide"
|
||||
|
||||
default_sidebar_state = "open"
|
||||
|
||||
# Più “SaaS”: evita di mostrare troppo ragionamento di default
|
||||
cot = "tool_call"
|
||||
# Description of the assistant. This is used for HTML tags.
|
||||
# description = ""
|
||||
|
||||
custom_css = "/public/ui-s-tier.css"
|
||||
# Chain of Thought (CoT) display mode. Can be "hidden", "tool_call" or "full".
|
||||
cot = "full"
|
||||
|
||||
# Specify a CSS file that can be used to customize the user interface.
|
||||
# The CSS file can be served from the public directory or via an external link.
|
||||
# custom_css = "/public/test.css"
|
||||
|
||||
# CSS personalizzato
|
||||
custom_css = "/public/custom.css"
|
||||
|
||||
# Logo custom
|
||||
[UI.theme]
|
||||
primary_color = "#0066CC"
|
||||
primary_color = "#0066CC" # Colore brand
|
||||
background_color = "#1a1a1a"
|
||||
alert_style = "modern"
|
||||
|
||||
# Brand assets (metti i file in /public/brand/)
|
||||
logo_file_url = "/public/brand/logo-header.png"
|
||||
default_avatar_file_url = "/public/brand/avatar.png"
|
||||
login_page_image = "/public/brand/login.jpg"
|
||||
login_page_image_filter = "brightness-50 grayscale"
|
||||
# Specify additional attributes for a custom CSS file
|
||||
# custom_css_attributes = "media=\"print\""
|
||||
|
||||
[[UI.header_links]]
|
||||
name = "Docs"
|
||||
display_name = "Docs"
|
||||
icon_url = "/public/brand/icon-32.png"
|
||||
url = "https://ai.dffm.it"
|
||||
target = "_blank"
|
||||
# Specify a JavaScript file that can be used to customize the user interface.
|
||||
# The JavaScript file can be served from the public directory.
|
||||
# custom_js = "/public/test.js"
|
||||
|
||||
[[UI.header_links]]
|
||||
name = "Support"
|
||||
display_name = "Support"
|
||||
icon_url = "/public/brand/icon-32.png"
|
||||
url = "mailto:support@dffm.it"
|
||||
target = "_blank"
|
||||
# The style of alert boxes. Can be "classic" or "modern".
|
||||
alert_style = "classic"
|
||||
|
||||
# Specify additional attributes for custom JS file
|
||||
# custom_js_attributes = "async type = \"module\""
|
||||
|
||||
# Custom login page image, relative to public directory or external URL
|
||||
# login_page_image = "/public/custom-background.jpg"
|
||||
|
||||
# Custom login page image filter (Tailwind internal filters, no dark/light variants)
|
||||
# login_page_image_filter = "brightness-50 grayscale"
|
||||
# login_page_image_dark_filter = "contrast-200 blur-sm"
|
||||
|
||||
# Specify a custom meta URL (used for meta tags like og:url)
|
||||
# custom_meta_url = "https://github.com/Chainlit/chainlit"
|
||||
|
||||
# Specify a custom meta image url.
|
||||
# custom_meta_image_url = "https://chainlit-cloud.s3.eu-west-3.amazonaws.com/logo/chainlit_banner.png"
|
||||
|
||||
# Load assistant logo directly from URL.
|
||||
logo_file_url = ""
|
||||
|
||||
# Load assistant avatar image directly from URL.
|
||||
default_avatar_file_url = ""
|
||||
|
||||
# Specify a custom build directory for the frontend.
|
||||
# This can be used to customize the frontend code.
|
||||
# Be careful: If this is a relative path, it should not start with a slash.
|
||||
# custom_build = "./public/build"
|
||||
|
||||
# Specify optional one or more custom links in the header.
|
||||
# [[UI.header_links]]
|
||||
# name = "Issues"
|
||||
# display_name = "Report Issue"
|
||||
# icon_url = "https://avatars.githubusercontent.com/u/128686189?s=200&v=4"
|
||||
# url = "https://github.com/Chainlit/chainlit/issues"
|
||||
# target = "_blank" (default) # Optional: "_self", "_parent", "_top".
|
||||
|
||||
[meta]
|
||||
generated_by = "2.8.3"
|
||||
|
|
|
|||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
After Width: | Height: | Size: 162 KiB |
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
After Width: | Height: | Size: 162 KiB |
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,463 @@
|
|||
import os
|
||||
import re
|
||||
import uuid
|
||||
import shutil
|
||||
import requests
|
||||
import time
|
||||
import json
|
||||
from datetime import datetime
|
||||
from typing import Optional, Dict, List, Any
|
||||
import chainlit as cl
|
||||
import ollama
|
||||
from docling.document_converter import DocumentConverter
|
||||
from qdrant_client import AsyncQdrantClient
|
||||
# CORREZIONE IMPORT: Importiamo le classi necessarie direttamente dalla libreria
|
||||
from qdrant_client.models import PointStruct, Distance, VectorParams, SparseVectorParams, Prefetch
|
||||
from chainlit.data.sql_alchemy import SQLAlchemyDataLayer
|
||||
from chainlit.types import ThreadDict
|
||||
from functools import lru_cache
|
||||
|
||||
# === FIX IMPORT ROBUSTO ===
|
||||
try:
|
||||
from chainlit.data.storage_clients import BaseStorageClient
|
||||
except ImportError:
|
||||
try:
|
||||
from chainlit.data.base import BaseStorageClient
|
||||
except ImportError:
|
||||
from chainlit.data.storage_clients.base import BaseStorageClient
|
||||
|
||||
# === CONFIGURAZIONE ===
|
||||
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station")
|
||||
OLLAMA_URL = os.getenv("OLLAMA_URL", "http://192.168.1.243:11434")
|
||||
QDRANT_URL = os.getenv("QDRANT_URL", "http://qdrant:6333")
|
||||
BGE_API_URL = os.getenv("BGE_API_URL", "http://192.168.1.243:8001/embed")
|
||||
|
||||
VISION_MODEL = "minicpm-v"
|
||||
DEFAULT_TEXT_MODEL = "glm-4.6:cloud"
|
||||
|
||||
WORKSPACES_DIR = "./workspaces"
|
||||
STORAGE_DIR = "./.files"
|
||||
|
||||
os.makedirs(STORAGE_DIR, exist_ok=True)
|
||||
os.makedirs(WORKSPACES_DIR, exist_ok=True)
|
||||
|
||||
# === MAPPING UTENTI ===
|
||||
USER_PROFILES = {
|
||||
"giuseppe@defranceschi.pro": { "role": "admin", "name": "Giuseppe", "workspace": "admin_workspace", "rag_collection": "admin_docs", "capabilities": ["debug", "all"], "show_code": True },
|
||||
"giuseppe.defranceschi@gmail.com": { "role": "admin", "name": "Giuseppe", "workspace": "admin_workspace", "rag_collection": "admin_docs", "capabilities": ["debug", "all"], "show_code": True },
|
||||
"federica.tecchio@gmail.com": { "role": "business", "name": "Federica", "workspace": "business_workspace", "rag_collection": "contabilita", "capabilities": ["basic_chat"], "show_code": False },
|
||||
"riccardob545@gmail.com": { "role": "engineering", "name": "Riccardo", "workspace": "engineering_workspace", "rag_collection": "engineering_docs", "capabilities": ["code"], "show_code": True },
|
||||
"giuliadefranceschi05@gmail.com": { "role": "architecture", "name": "Giulia", "workspace": "architecture_workspace", "rag_collection": "architecture_manuals", "capabilities": ["visual"], "show_code": False }
|
||||
}
|
||||
|
||||
# === STORAGE CLIENT ===
|
||||
class LocalStorageClient(BaseStorageClient):
|
||||
def __init__(self, storage_path: str):
|
||||
self.storage_path = storage_path
|
||||
os.makedirs(storage_path, exist_ok=True)
|
||||
async def upload_file(self, object_key: str, data: bytes, mime: str = "application/octet-stream", overwrite: bool = True) -> Dict[str, str]:
|
||||
file_path = os.path.join(self.storage_path, object_key)
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
with open(file_path, "wb") as f: f.write(data)
|
||||
return {"object_key": object_key, "url": f"/files/{object_key}"}
|
||||
async def get_read_url(self, object_key: str) -> str: return f"/files/{object_key}"
|
||||
async def delete_file(self, object_key: str) -> bool:
|
||||
path = os.path.join(self.storage_path, object_key)
|
||||
if os.path.exists(path): os.remove(path); return True
|
||||
return False
|
||||
async def close(self): pass
|
||||
|
||||
@cl.data_layer
|
||||
def get_data_layer():
|
||||
return SQLAlchemyDataLayer(conninfo=DATABASE_URL, storage_provider=LocalStorageClient(STORAGE_DIR))
|
||||
|
||||
# === OAUTH & UTILS ===
|
||||
@cl.oauth_callback
|
||||
def oauth_callback(provider_id: str, token: str, raw_user_data: Dict[str, str], default_user: cl.User) -> Optional[cl.User]:
|
||||
if provider_id == "google":
|
||||
email = raw_user_data.get("email", "").lower()
|
||||
profile = USER_PROFILES.get(email, USER_PROFILES.get("guest", {"role": "guest", "name": "Guest", "workspace": "guest", "rag_collection": "public", "show_code": False}))
|
||||
default_user.metadata.update({"role": profile["role"], "workspace": profile["workspace"], "rag_collection": profile["rag_collection"], "show_code": profile["show_code"], "display_name": profile["name"]})
|
||||
return default_user
|
||||
return default_user
|
||||
|
||||
def create_workspace(workspace_name: str) -> str:
|
||||
path = os.path.join(WORKSPACES_DIR, workspace_name)
|
||||
os.makedirs(path, exist_ok=True)
|
||||
return path
|
||||
|
||||
|
||||
# === CORE: DOCLING ===
|
||||
def process_file_with_docling(file_path: str) -> str:
|
||||
try:
|
||||
converter = DocumentConverter()
|
||||
result = converter.convert(file_path)
|
||||
return result.document.export_to_markdown()
|
||||
except Exception as e:
|
||||
print(f"❌ Docling Error: {e}")
|
||||
return ""
|
||||
|
||||
# === CORE: BGE-M3 CLIENT ===
|
||||
def get_bge_embeddings(text: str) -> Optional[Dict[str, Any]]:
|
||||
try:
|
||||
payload = {"texts": [text[:8000]]}
|
||||
response = requests.post(BGE_API_URL, json=payload, timeout=30)
|
||||
response.raise_for_status()
|
||||
data = response.json().get("data", [])
|
||||
if data:
|
||||
return data[0]
|
||||
return None
|
||||
except Exception as e:
|
||||
print(f"❌ BGE API Error: {e}")
|
||||
return None
|
||||
|
||||
# === CORE: QDRANT ===
|
||||
async def ensure_collection(collection_name: str):
|
||||
client = AsyncQdrantClient(url=QDRANT_URL)
|
||||
if not await client.collection_exists(collection_name):
|
||||
await client.create_collection(
|
||||
collection_name=collection_name,
|
||||
vectors_config={"dense": VectorParams(size=1024, distance=Distance.COSINE)},
|
||||
sparse_vectors_config={"sparse": SparseVectorParams()}
|
||||
)
|
||||
|
||||
async def index_document(file_name: str, content: str, collection_name: str):
|
||||
await ensure_collection(collection_name)
|
||||
client = AsyncQdrantClient(url=QDRANT_URL)
|
||||
|
||||
chunk_size = 2000
|
||||
overlap = 200
|
||||
|
||||
points = []
|
||||
for i in range(0, len(content), chunk_size - overlap):
|
||||
chunk = content[i : i + chunk_size]
|
||||
embedding_data = get_bge_embeddings(chunk)
|
||||
|
||||
if embedding_data:
|
||||
points.append(PointStruct(
|
||||
id=str(uuid.uuid4()),
|
||||
vector={
|
||||
"dense": embedding_data["dense"],
|
||||
"sparse": embedding_data["sparse"]
|
||||
},
|
||||
payload={
|
||||
"file_name": file_name,
|
||||
"content": chunk,
|
||||
"indexed_at": datetime.now().isoformat()
|
||||
}
|
||||
))
|
||||
|
||||
if points:
|
||||
await client.upsert(collection_name=collection_name, points=points)
|
||||
return len(points)
|
||||
return 0
|
||||
|
||||
async def search_hybrid(query: str, collection_name: str, limit: int = 4) -> str:
|
||||
client = AsyncQdrantClient(url=QDRANT_URL)
|
||||
if not await client.collection_exists(collection_name): return ""
|
||||
|
||||
query_emb = get_bge_embeddings(query)
|
||||
if not query_emb: return ""
|
||||
|
||||
# CORREZIONE QUI: Usiamo l'oggetto Prefetch importato correttamente
|
||||
results = await client.query_points(
|
||||
collection_name=collection_name,
|
||||
prefetch=[
|
||||
Prefetch(
|
||||
query=query_emb["sparse"],
|
||||
using="sparse",
|
||||
limit=limit * 2
|
||||
)
|
||||
],
|
||||
query=query_emb["dense"],
|
||||
using="dense",
|
||||
limit=limit
|
||||
)
|
||||
|
||||
context = []
|
||||
for hit in results.points:
|
||||
context.append(f"--- DA {hit.payload['file_name']} ---\n{hit.payload['content']}")
|
||||
|
||||
return "\n\n".join(context)
|
||||
|
||||
# === Caching Embeddings ===
|
||||
@lru_cache(maxsize=1000)
|
||||
def get_bge_embeddings_cached(text: str):
|
||||
"""Cache per query ripetute"""
|
||||
return get_bge_embeddings(text)
|
||||
|
||||
# === CHAINLIT HANDLERS ===
|
||||
@cl.on_chat_start
|
||||
async def start():
|
||||
# 1. Profilo utente
|
||||
user = cl.user_session.get("user")
|
||||
email = user.identifier if user else "guest"
|
||||
profile = USER_PROFILES.get(email, USER_PROFILES["giuseppe@defranceschi.pro"])
|
||||
|
||||
cl.user_session.set("profile", profile)
|
||||
create_workspace(profile["workspace"])
|
||||
|
||||
# 2. Badge HTML personalizzato
|
||||
role_color = {
|
||||
"admin": "#e74c3c",
|
||||
"engineering": "#3498db",
|
||||
"business": "#2ecc71",
|
||||
"architecture": "#9b59b6",
|
||||
}.get(profile["role"], "#95a5a6")
|
||||
|
||||
badge_html = f"""
|
||||
<div style="background:{role_color}; padding:8px; border-radius:8px; margin-bottom:16px;">
|
||||
👤 <b>{profile['name']}</b> | 🔧 {profile['role'].upper()} | 📁 {profile['workspace']}
|
||||
</div>
|
||||
"""
|
||||
await cl.Message(content=badge_html).send()
|
||||
|
||||
# 3. Settings UI
|
||||
settings = await cl.ChatSettings(
|
||||
[
|
||||
cl.input_widget.Slider(
|
||||
id="top_k",
|
||||
label="Numero Documenti RAG",
|
||||
initial=4,
|
||||
min=1,
|
||||
max=10,
|
||||
step=1,
|
||||
),
|
||||
cl.input_widget.Select(
|
||||
id="vision_detail",
|
||||
label="Dettaglio Analisi Immagini",
|
||||
values=["auto", "low", "high"],
|
||||
initial_value="auto",
|
||||
),
|
||||
cl.input_widget.TextInput(
|
||||
id="system_instruction",
|
||||
label="Istruzione Sistema Custom (opzionale)",
|
||||
initial="",
|
||||
placeholder="Es: Rispondi sempre in formato tecnico...",
|
||||
),
|
||||
cl.input_widget.Select(
|
||||
id="model",
|
||||
label="Modello di Ragionamento",
|
||||
values=[DEFAULT_TEXT_MODEL, "llama3.2", "mistral", "qwen2.5-coder:32b"],
|
||||
initial_value=DEFAULT_TEXT_MODEL,
|
||||
),
|
||||
cl.input_widget.Slider(
|
||||
id="temperature",
|
||||
label="Creatività (Temperatura)",
|
||||
initial=0.3,
|
||||
min=0,
|
||||
max=1,
|
||||
step=0.1,
|
||||
),
|
||||
cl.input_widget.Switch(
|
||||
id="rag_enabled",
|
||||
label="Usa Conoscenza Documenti (RAG)",
|
||||
initial=True,
|
||||
),
|
||||
]
|
||||
).send()
|
||||
|
||||
cl.user_session.set("settings", settings)
|
||||
|
||||
# 4. Messaggio iniziale (opzionale)
|
||||
await cl.Message(
|
||||
content=(
|
||||
f"🚀 **Vision-RAG Hybrid System Online**\n"
|
||||
f"Utente: {profile['name']} | Workspace: {profile['workspace']}\n"
|
||||
f"Engine: Docling + BGE-M3 + {VISION_MODEL}"
|
||||
)
|
||||
).send()
|
||||
|
||||
|
||||
cl.user_session.set("settings", settings)
|
||||
|
||||
await cl.Message(f"🚀 **Vision-RAG Hybrid System Online**\nUtente: {profile['name']} | Workspace: {profile['workspace']}\nEngine: Docling + BGE-M3 + {VISION_MODEL}").send()
|
||||
|
||||
@cl.on_settings_update
|
||||
async def setup_agent(settings):
|
||||
cl.user_session.set("settings", settings)
|
||||
await cl.Message(content=f"✅ Impostazioni aggiornate: Modello {settings['model']}").send()
|
||||
|
||||
async def log_metrics(metrics: dict):
|
||||
# Versione minima: log su stdout
|
||||
print("[METRICS]", metrics)
|
||||
|
||||
# In futuro puoi:
|
||||
# - salvarle in Postgres
|
||||
# - mandarle a Prometheus / Grafana
|
||||
# - scriverle su file JSON per analisi settimanale
|
||||
|
||||
# - Resume Chat Handler
|
||||
|
||||
@cl.on_chat_resume
|
||||
async def on_chat_resume(thread: ThreadDict):
|
||||
"""
|
||||
Viene chiamato quando l'utente clicca 'Riprendi' su una chat archiviata.
|
||||
Chainlit carica già i messaggi nella UI, qui puoi solo ripristinare la sessione.
|
||||
"""
|
||||
# Se vuoi, puoi recuperare l'identifier dell’utente dal thread
|
||||
user_identifier = thread.get("userIdentifier")
|
||||
profile = USER_PROFILES.get(
|
||||
user_identifier,
|
||||
USER_PROFILES["giuseppe@defranceschi.pro"],
|
||||
)
|
||||
cl.user_session.set("profile", profile)
|
||||
|
||||
# Puoi anche ripristinare eventuale stato custom (es: impostazioni di default)
|
||||
# oppure semplicemente salutare l’utente
|
||||
await cl.Message(
|
||||
content="👋 Bentornato! Possiamo riprendere da questa conversazione."
|
||||
).send()
|
||||
|
||||
@cl.on_message
|
||||
async def main(message: cl.Message):
|
||||
start_time = time.time()
|
||||
|
||||
profile = cl.user_session.get("profile")
|
||||
settings = cl.user_session.get("settings", {})
|
||||
|
||||
selected_model = settings.get("model", DEFAULT_TEXT_MODEL)
|
||||
temperature = settings.get("temperature", 0.3)
|
||||
rag_enabled = settings.get("rag_enabled", True)
|
||||
|
||||
workspace = create_workspace(profile["workspace"])
|
||||
|
||||
images_for_vision = []
|
||||
doc_context = ""
|
||||
rag_context = "" # ← la inizializzi qui, così esiste sempre
|
||||
|
||||
# 1. GESTIONE FILE
|
||||
if message.elements:
|
||||
for element in message.elements:
|
||||
file_path = os.path.join(workspace, element.name)
|
||||
shutil.copy(element.path, file_path)
|
||||
|
||||
if "image" in element.mime:
|
||||
images_for_vision.append(file_path)
|
||||
msg_img = cl.Message(
|
||||
content=f"👁️ Analizzo immagine **{element.name}** con {VISION_MODEL}..."
|
||||
)
|
||||
await msg_img.send()
|
||||
|
||||
with open(file_path, "rb") as img_file:
|
||||
img_bytes = img_file.read()
|
||||
|
||||
client_sync = ollama.Client(host=OLLAMA_URL)
|
||||
res = client_sync.chat(
|
||||
model=VISION_MODEL,
|
||||
messages=[{
|
||||
"role": "user",
|
||||
"content": (
|
||||
"Analizza questa immagine tecnica. Trascrivi testi, codici "
|
||||
"e descrivi diagrammi o tabelle in dettaglio."
|
||||
),
|
||||
"images": [img_bytes],
|
||||
}],
|
||||
)
|
||||
desc = res["message"]["content"]
|
||||
doc_context += f"\n\n[DESCRIZIONE IMMAGINE {element.name}]:\n{desc}"
|
||||
msg_img.content = f"✅ Immagine analizzata:\n{desc[:200]}..."
|
||||
await msg_img.update()
|
||||
|
||||
elif element.name.endswith((".pdf", ".docx")):
|
||||
msg_doc = cl.Message(
|
||||
content=f"📄 Leggo **{element.name}** con Docling (tabelle/formule)..."
|
||||
)
|
||||
await msg_doc.send()
|
||||
|
||||
markdown_content = process_file_with_docling(file_path)
|
||||
if markdown_content:
|
||||
chunks = await index_document(
|
||||
element.name, markdown_content, profile["rag_collection"]
|
||||
)
|
||||
msg_doc.content = (
|
||||
f"✅ **{element.name}**: Convertito e salvato {chunks} "
|
||||
"frammenti nel DB vettoriale."
|
||||
)
|
||||
doc_context += (
|
||||
f"\n\n[CONTENUTO FILE {element.name}]:\n"
|
||||
f"{markdown_content[:1000]}..."
|
||||
)
|
||||
else:
|
||||
msg_doc.content = f"❌ Errore lettura {element.name}"
|
||||
await msg_doc.update()
|
||||
|
||||
# 2. RAG RETRIEVAL
|
||||
if rag_enabled and not images_for_vision:
|
||||
rag_context = await search_hybrid(
|
||||
message.content, profile["rag_collection"]
|
||||
)
|
||||
|
||||
final_context = ""
|
||||
if rag_context:
|
||||
final_context += f"CONTESTO RAG:\n{rag_context}\n"
|
||||
if doc_context:
|
||||
final_context += f"CONTESTO SESSIONE CORRENTE:\n{doc_context}\n"
|
||||
|
||||
system_prompt = (
|
||||
"Sei un assistente tecnico esperto. Usa il contesto fornito "
|
||||
"(incluso Markdown di tabelle e descrizioni immagini) per "
|
||||
"rispondere con precisione. Cita i documenti fonte."
|
||||
)
|
||||
|
||||
msg = cl.Message(content="")
|
||||
await msg.send()
|
||||
|
||||
error = None
|
||||
|
||||
# 3. GENERAZIONE
|
||||
try:
|
||||
client_async = ollama.AsyncClient(host=OLLAMA_URL)
|
||||
stream = await client_async.chat(
|
||||
model=selected_model,
|
||||
messages=[
|
||||
{"role": "system", "content": system_prompt},
|
||||
{
|
||||
"role": "user",
|
||||
"content": f"Domanda: {message.content}\n\n{final_context}",
|
||||
},
|
||||
],
|
||||
options={"temperature": temperature},
|
||||
stream=True,
|
||||
)
|
||||
|
||||
async for chunk in stream:
|
||||
content = chunk["message"]["content"]
|
||||
await msg.stream_token(content)
|
||||
await msg.update()
|
||||
except Exception as e:
|
||||
error = str(e)
|
||||
await msg.stream_token(f"❌ Errore AI: {error}")
|
||||
await msg.update()
|
||||
|
||||
# 4. SALVATAGGIO CODICE
|
||||
if profile["show_code"]:
|
||||
code_blocks = re.findall(r"``````", msg.content, re.DOTALL)
|
||||
if code_blocks:
|
||||
for i, code in enumerate(code_blocks):
|
||||
fname = f"script_{datetime.now().strftime('%H%M%S')}_{i}.py"
|
||||
with open(os.path.join(workspace, fname), "w") as f:
|
||||
f.write(code.strip())
|
||||
await cl.Message(
|
||||
content=f"💾 Script salvato: `{fname}`"
|
||||
).send()
|
||||
|
||||
# 5. METRICHE (ALLA FINE)
|
||||
elapsed = time.time() - start_time
|
||||
|
||||
# Se rag_context è una stringa concatenata, puoi stimare i "rag_hits"
|
||||
# contando i separatori che usi in search_hybrid (es. '--- DA ')
|
||||
if rag_context:
|
||||
rag_hits = rag_context.count("--- DA ")
|
||||
else:
|
||||
rag_hits = 0
|
||||
|
||||
metrics = {
|
||||
"response_time": elapsed,
|
||||
"rag_hits": rag_hits,
|
||||
"model": selected_model,
|
||||
"user_role": profile["role"],
|
||||
"error": error,
|
||||
}
|
||||
|
||||
await log_metrics(metrics)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -6,4 +6,3 @@ workspaces/*
|
|||
qdrant_storage/.files/
|
||||
__pycache__/
|
||||
.env
|
||||
.files/
|
||||
|
|
@ -3,9 +3,12 @@ FROM python:3.11-slim
|
|||
WORKDIR /app
|
||||
|
||||
# Installa dipendenze sistema
|
||||
# Aggiunte libgl1 e libglib2.0-0 per il supporto Docling/CV2
|
||||
RUN apt-get update && apt-get install -y \
|
||||
gcc \
|
||||
postgresql-client \
|
||||
libgl1 \
|
||||
libglib2.0-0 \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copia requirements e installa
|
||||
|
|
|
|||
593
README.md
593
README.md
|
|
@ -1,325 +1,342 @@
|
|||
# AI Station - Document Analysis Platform
|
||||
# AI Station DFFM - Vision-RAG Hybrid System
|
||||
|
||||
## 📋 Overview
|
||||
Sistema AI multi-utente con supporto RAG (Retrieval-Augmented Generation), analisi immagini e gestione documenti avanzata, basato su Chainlit, Ollama e BGE-M3.
|
||||
|
||||
**AI Station** è una piattaforma di analisi documentale basata su AI che utilizza **Retrieval-Augmented Generation (RAG)** per analizzare PDF e documenti testuali con il modello **GLM-4.6:Cloud**.
|
||||
## 🌟 Features
|
||||
|
||||
### Core AI
|
||||
- **RAG Hybrid Search** con BGE-M3 (dense + sparse embeddings)
|
||||
- **Vision Analysis** tramite MiniCPM-V per OCR e descrizione immagini
|
||||
- **Document Processing** con Docling (PDF, DOCX) con preservazione tabelle/formule
|
||||
- **Multi-Model Support** (Ollama locale + cloud models)
|
||||
- **Streaming Responses** con latenza ridotta
|
||||
|
||||
### Multi-Utente
|
||||
- **OAuth2 Google** con profili personalizzati per ruolo
|
||||
- **Workspace isolati** per utente/team
|
||||
- **RAG Collections dedicate** per knowledge base separate
|
||||
- **Permessi granulari** (admin, engineering, business, architecture)
|
||||
|
||||
### UI/UX
|
||||
- **Badge ruolo personalizzato** con colori dedicati
|
||||
- **Settings dinamici** (temperatura, top_k RAG, modello, istruzioni custom)
|
||||
- **Chat history persistente** con ripresa conversazioni
|
||||
- **Auto-save codice Python** estratto dalle risposte
|
||||
- **Metriche real-time** (response time, RAG hits, errori)
|
||||
|
||||
### Performance
|
||||
- **Caching embeddings** (LRU cache 1000 query)
|
||||
- **Chunking intelligente** (2000 char con overlap 200)
|
||||
- **Async operations** su Qdrant e Ollama
|
||||
- **PostgreSQL** per persistenza thread e metadata
|
||||
|
||||
## 🏗️ Architettura
|
||||
|
||||
### Hardware Setup
|
||||
|
||||
#### AI-SRV (Chainlit VM)
|
||||
- **IP**: 192.168.1.244
|
||||
- **CPU**: 16 core (QEMU Virtual)
|
||||
- **RAM**: 64 GB
|
||||
- **Storage**: 195 GB
|
||||
- **Ruolo**: Host Chainlit app + PostgreSQL + Qdrant
|
||||
|
||||
#### AI-Server (GPU Workstation)
|
||||
- **IP**: 192.168.1.243
|
||||
- **CPU**: Intel Core Ultra 7 265 (20 core, max 6.5 GHz)
|
||||
- **RAM**: 32 GB
|
||||
- **GPU**: NVIDIA RTX A1000 (8 GB VRAM)
|
||||
- **Storage**: 936 GB NVMe
|
||||
- **Ruolo**: Ollama models + BGE-M3 embeddings service
|
||||
|
||||
### Stack Tecnologico
|
||||
- **Backend**: Python + Chainlit (LLM UI framework)
|
||||
- **LLM**: GLM-4.6:Cloud (via Ollama Cloud)
|
||||
- **Vector DB**: Qdrant (semantic search)
|
||||
- **PDF Processing**: PyMuPDF (fitz)
|
||||
- **Database**: PostgreSQL + SQLAlchemy ORM
|
||||
- **Containerization**: Docker Compose
|
||||
- **Embeddings**: nomic-embed-text (via Ollama local)
|
||||
|
||||
---
|
||||
┌─────────────────────────────────────────┐
|
||||
│ Chainlit UI (ai-srv) │
|
||||
│ Badge + Settings + Chat History │
|
||||
└──────────────┬──────────────────────────┘
|
||||
│
|
||||
┌──────────────▼──────────────────────────┐
|
||||
│ Python Backend (app.py) │
|
||||
│ - OAuth2 Google │
|
||||
│ - Multi-user profiles │
|
||||
│ - File processing orchestration │
|
||||
└─┬────────┬──────────┬──────────┬────────┘
|
||||
│ │ │ │
|
||||
▼ ▼ ▼ ▼
|
||||
┌─────┐ ┌─────┐ ┌────────┐ ┌──────────┐
|
||||
│ PG │ │Qdrant│ │ Ollama │ │ BGE API │
|
||||
│ │ │Vector│ │ GPU │ │ CPU │
|
||||
│ │ │ DB │ │ Server │ │ Server │
|
||||
└─────┘ └─────┘ └────────┘ └──────────┘
|
||||
ai-srv ai-srv ai-server ai-server
|
||||
|
||||
## 🚀 Quick Start
|
||||
text
|
||||
|
||||
### Prerequisites
|
||||
- Docker & Docker Compose
|
||||
- Ollama installed locally (for embeddings)
|
||||
- Ollama Cloud account (for glm-4.6:cloud)
|
||||
## 📋 Requisiti
|
||||
|
||||
### 1️⃣ Clone & Setup
|
||||
### Sistema
|
||||
- Docker 24.x+ con Docker Compose
|
||||
- Accesso a Google Cloud Console (per OAuth2)
|
||||
- 2 server (o VM) con networking condiviso
|
||||
|
||||
### Modelli Ollama (da installare su ai-server)
|
||||
```bash
|
||||
git clone git@github.com:your-username/ai-station.git
|
||||
ollama pull minicpm-v # Vision model (5.5 GB)
|
||||
ollama pull glm-4.6:cloud # Cloud reasoning
|
||||
ollama pull qwen2.5-coder:32b # Code generation (9 GB)
|
||||
ollama pull llama3.2 # Fast general purpose (4.7 GB)
|
||||
🚀 Installazione
|
||||
1. Clone Repository
|
||||
bash
|
||||
git clone <your-repo>
|
||||
cd ai-station
|
||||
2. Configurazione Ambiente
|
||||
Crea .env:
|
||||
|
||||
# Configure environment
|
||||
cat > .env << 'EOF'
|
||||
DATABASE_URL=postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station
|
||||
bash
|
||||
# Database
|
||||
DATABASE_URL=postgresql+asyncpg://ai_user:CHANGE_ME@postgres:5432/ai_station
|
||||
|
||||
# AI Services
|
||||
OLLAMA_URL=http://192.168.1.243:11434
|
||||
QDRANT_URL=http://qdrant:6333
|
||||
EOF
|
||||
```
|
||||
BGE_API_URL=http://192.168.1.243:8001/embed
|
||||
|
||||
### 2️⃣ Authenticate Ollama Cloud
|
||||
```bash
|
||||
ollama signin
|
||||
# Follow the link to authenticate with your Ollama account
|
||||
```
|
||||
# OAuth Google
|
||||
OAUTH_GOOGLE_CLIENT_ID=your-client-id.apps.googleusercontent.com
|
||||
OAUTH_GOOGLE_CLIENT_SECRET=your-secret
|
||||
CHAINLIT_AUTH_SECRET=$(openssl rand -base64 32)
|
||||
|
||||
### 3️⃣ Start Services
|
||||
```bash
|
||||
docker compose up -d
|
||||
3. Configurazione OAuth Google
|
||||
Vai su Google Cloud Console
|
||||
|
||||
Crea nuovo progetto → API e servizi → Credenziali
|
||||
|
||||
Crea "ID client OAuth 2.0"
|
||||
|
||||
Aggiungi URI autorizzati:
|
||||
|
||||
https://ai.dffm.it/auth/oauth/google/callback
|
||||
|
||||
http://localhost:8000/auth/oauth/google/callback (dev)
|
||||
|
||||
Copia Client ID e Secret in .env
|
||||
|
||||
4. Personalizza Utenti
|
||||
Modifica app.py → USER_PROFILES:
|
||||
|
||||
python
|
||||
USER_PROFILES = {
|
||||
"tuo.email@example.com": {
|
||||
"role": "admin",
|
||||
"name": "Nome",
|
||||
"workspace": "workspace_name",
|
||||
"rag_collection": "docs_collection",
|
||||
"capabilities": ["debug", "all"],
|
||||
"show_code": True,
|
||||
},
|
||||
# ... altri utenti
|
||||
}
|
||||
5. Deploy
|
||||
bash
|
||||
# Build e avvio
|
||||
docker compose up -d --build
|
||||
|
||||
# Verifica logs
|
||||
docker compose logs -f chainlit-app
|
||||
|
||||
# Dovresti vedere:
|
||||
# ✅ Tutte le tabelle create con successo.
|
||||
# Your app is available at http://localhost:8000
|
||||
6. Setup BGE-M3 Service (su ai-server)
|
||||
bash
|
||||
# Installa dependencies
|
||||
pip install fastapi uvicorn FlagEmbedding torch
|
||||
|
||||
# Salva il file bge_service.py (vedi docs/)
|
||||
python bge_service.py
|
||||
# Listening on http://0.0.0.0:8001
|
||||
🎯 Utilizzo
|
||||
Login
|
||||
Accedi via browser: https://ai.dffm.it (o http://localhost:8000)
|
||||
|
||||
Click su "Continue with Google"
|
||||
|
||||
Autorizza con account configurato in USER_PROFILES
|
||||
|
||||
Chat con RAG
|
||||
Carica PDF/DOCX → Sistema li indicizza automaticamente
|
||||
|
||||
Fai domande → Risposta con contesto dai documenti
|
||||
|
||||
Regola top_k (numero documenti) via settings
|
||||
|
||||
Analisi Immagini
|
||||
Carica screenshot/diagrammi
|
||||
|
||||
Il sistema:
|
||||
|
||||
Estrae testo (OCR)
|
||||
|
||||
Descrive grafici/tabelle
|
||||
|
||||
Usa descrizione come contesto per rispondere
|
||||
|
||||
Settings Disponibili
|
||||
Numero Documenti RAG (1-10): Quanti chunk recuperare
|
||||
|
||||
Modello: Scegli tra locale/cloud
|
||||
|
||||
Temperatura (0-1): Creatività risposta
|
||||
|
||||
RAG Enabled: On/Off recupero documenti
|
||||
|
||||
Istruzione Custom: Prompt system personalizzato
|
||||
|
||||
Ripresa Chat
|
||||
Sidebar → Chat History
|
||||
|
||||
Click su conversazione → "Riprendi"
|
||||
|
||||
Continua da dove avevi lasciato
|
||||
|
||||
📊 Metriche
|
||||
Ogni risposta logga (stdout):
|
||||
|
||||
json
|
||||
{
|
||||
"response_time": 18.65,
|
||||
"rag_hits": 4,
|
||||
"model": "glm-4.6:cloud",
|
||||
"user_role": "admin",
|
||||
"error": null
|
||||
}
|
||||
Raccogli con:
|
||||
|
||||
bash
|
||||
docker logs ai-station-app | grep METRICS > metrics.log
|
||||
🔧 Troubleshooting
|
||||
RAG non trova documenti
|
||||
Verifica collection name in USER_PROFILES[email]["rag_collection"]
|
||||
|
||||
Controlla Qdrant: curl http://localhost:6333/collections
|
||||
|
||||
Badge HTML non si vede
|
||||
Abilita in .chainlit/config.toml:
|
||||
```
|
||||
|
||||
### 4️⃣ Access UI
|
||||
Navigate to: **http://localhost:8000**
|
||||
|
||||
---
|
||||
|
||||
## 📁 Project Structure
|
||||
```text
|
||||
[features]
|
||||
unsafe_allow_html = true
|
||||
Modello Ollama non risponde
|
||||
bash
|
||||
# Testa connessione
|
||||
curl http://192.168.1.243:11434/api/tags
|
||||
|
||||
# Verifica modello disponibile
|
||||
ollama list
|
||||
BGE embeddings fail
|
||||
```
|
||||
```bash
|
||||
# Testa API
|
||||
curl -X POST http://192.168.1.243:8001/embed \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"texts": ["test"]}'
|
||||
```
|
||||
|
||||
📁 Struttura Progetto
|
||||
```bash
|
||||
ai-station/
|
||||
├── app.py # Main Chainlit application
|
||||
├── requirements.txt # Python dependencies
|
||||
├── docker-compose.yml # Docker services config
|
||||
├── .env # Environment variables (gitignored)
|
||||
├── workspaces/ # User workspace directories
|
||||
│ └── admin/ # Admin user files
|
||||
└── README.md # This file
|
||||
├── app.py # Main Chainlit app
|
||||
├── init_db.py # Database schema init
|
||||
├── requirements.txt # Python deps
|
||||
├── Dockerfile # Container config
|
||||
├── docker-compose.yaml # Multi-service orchestration
|
||||
├── .chainlit/
|
||||
│ └── config.toml # UI/features config
|
||||
├── public/
|
||||
│ └── custom.css # Custom styling
|
||||
├── workspaces/ # User file storage (volume)
|
||||
│ ├── admin_workspace/
|
||||
│ ├── engineering_workspace/
|
||||
│ └── ...
|
||||
└── .files/ # Chainlit storage (volume)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🔧 Features
|
||||
|
||||
### ✅ Implemented
|
||||
- **PDF Upload & Processing**: Extract text from PDF documents using PyMuPDF
|
||||
- **Document Indexing**: Automatic chunking and semantic indexing via Qdrant
|
||||
- **RAG Search**: Retrieve relevant document chunks based on semantic similarity
|
||||
- **Intelligent Analysis**: GLM-4.6:Cloud analyzes documents with full context
|
||||
- **Code Extraction**: Automatically save Python code blocks from responses
|
||||
- **Chat History**: Persistent conversation storage via SQLAlchemy
|
||||
- **Streaming Responses**: Real-time token streaming via Chainlit
|
||||
|
||||
### 🔄 Workflow
|
||||
1. User uploads PDF or TXT file
|
||||
2. System extracts text and creates semantic chunks
|
||||
3. Chunks indexed in Qdrant vector database
|
||||
4. User asks questions about documents
|
||||
5. RAG retrieves relevant chunks
|
||||
6. GLM-4.6:Cloud analyzes with full context
|
||||
7. Streaming response to user
|
||||
|
||||
---
|
||||
|
||||
## 📊 Technical Details
|
||||
|
||||
### Document Processing Pipeline
|
||||
|
||||
```
|
||||
PDF Upload
|
||||
↓
|
||||
PyMuPDF Text Extraction
|
||||
↓
|
||||
Text Chunking (1500 chars, 200 char overlap)
|
||||
↓
|
||||
nomic-embed-text Embeddings (Ollama local)
|
||||
↓
|
||||
Qdrant Vector Storage
|
||||
↓
|
||||
Semantic Search on User Query
|
||||
↓
|
||||
GLM-4.6:Cloud Analysis with RAG Context
|
||||
↓
|
||||
Chainlit Streaming Response
|
||||
```
|
||||
|
||||
### Key Functions
|
||||
|
||||
| Function | Purpose |
|
||||
|----------|---------|
|
||||
| `extract_text_from_pdf()` | Convert PDF to text using PyMuPDF |
|
||||
| `chunk_text()` | Split text into overlapping chunks |
|
||||
| `get_embeddings()` | Generate embeddings via Ollama |
|
||||
| `index_document()` | Store chunks in Qdrant |
|
||||
| `search_qdrant()` | Retrieve relevant context |
|
||||
| `on_message()` | Process user queries with RAG |
|
||||
|
||||
---
|
||||
|
||||
## 🔐 Environment Variables
|
||||
|
||||
```env
|
||||
DATABASE_URL=postgresql+asyncpg://user:pass@postgres:5432/ai_station
|
||||
OLLAMA_URL=http://192.168.1.243:11434 # Local Ollama for embeddings
|
||||
QDRANT_URL=http://qdrant:6333 # Vector database
|
||||
```
|
||||
|
||||
**Note**: GLM-4.6:Cloud authentication is handled automatically via `ollama signin`
|
||||
|
||||
---
|
||||
|
||||
## 🐳 Docker Services
|
||||
|
||||
| Service | Port | Purpose |
|
||||
|---------|------|---------|
|
||||
| `chainlit-app` | 8000 | Chainlit UI & API |
|
||||
| `postgres` | 5432 | Conversation persistence |
|
||||
| `qdrant` | 6333 | Vector database |
|
||||
| `ollama` | 11434 | Local embeddings (external) |
|
||||
|
||||
Start/Stop:
|
||||
🔐 Sicurezza
|
||||
```bash
|
||||
docker compose up -d # Start all services
|
||||
docker compose down # Stop all services
|
||||
docker compose logs -f # View logs
|
||||
docker compose restart # Restart services
|
||||
OAuth2 obbligatorio (no accesso anonimo)
|
||||
|
||||
Workspace isolation (file separati per utente)
|
||||
|
||||
HTML sanitization (configurable via unsafe_allow_html)
|
||||
|
||||
Environment secrets (.env mai committato)
|
||||
|
||||
PostgreSQL passwords cambiate da default
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📝 Usage Examples
|
||||
|
||||
### Example 1: Analyze Tax Document
|
||||
```
|
||||
User: "Qual è l'importo totale del documento?"
|
||||
AI Station:
|
||||
✅ Extracts PDF content
|
||||
✅ Searches relevant sections
|
||||
✅ Analyzes with GLM-4.6:Cloud
|
||||
📄 Returns: "Based on the document, the total amount is..."
|
||||
```
|
||||
|
||||
### Example 2: Multi-Document Analysis
|
||||
```
|
||||
1. Upload multiple PDFs (invoices, contracts)
|
||||
2. All documents automatically indexed
|
||||
3. Query across all documents simultaneously
|
||||
4. RAG retrieves most relevant chunks
|
||||
5. GLM-4.6:Cloud synthesizes answer
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Development
|
||||
|
||||
### Install Dependencies
|
||||
🚦 Roadmap
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
Re-ranking con cross-encoder
|
||||
|
||||
Query expansion automatica
|
||||
|
||||
Feedback loop (👍👎 su risposte)
|
||||
|
||||
Export conversazioni PDF/Markdown
|
||||
|
||||
Multi-query RAG parallelo
|
||||
|
||||
Prometheus/Grafana monitoring
|
||||
|
||||
Adaptive chunking per tipo documento
|
||||
|
||||
Audio input support
|
||||
```
|
||||
|
||||
### Requirements
|
||||
```
|
||||
chainlit==1.3.2
|
||||
pydantic==2.9.2
|
||||
ollama>=0.1.0
|
||||
asyncpg>=0.29.0
|
||||
psycopg2-binary
|
||||
qdrant-client>=1.10.0
|
||||
sqlalchemy>=2.0.0
|
||||
greenlet>=3.0.0
|
||||
sniffio
|
||||
aiohttp
|
||||
alembic
|
||||
pymupdf
|
||||
python-dotenv
|
||||
|
||||
## 📝 Licenza
|
||||
```tect
|
||||
MIT License - vedi file [LICENSE](LICENSE) per dettagli.
|
||||
Crea file LICENSE nella root del progetto
|
||||
text
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2026 DFFM / Giuseppe De Franceschi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
```
|
||||
|
||||
### Local Testing (without Docker)
|
||||
```bash
|
||||
# Start Ollama, PostgreSQL, Qdrant manually
|
||||
ollama serve &
|
||||
chainlit run app.py
|
||||
```
|
||||
👥 Contributors
|
||||
Giuseppe De Franceschi - @defranceschi
|
||||
|
||||
---
|
||||
🙏 Credits
|
||||
Chainlit - UI framework
|
||||
|
||||
## 🔄 Model Details
|
||||
Ollama - LLM runtime
|
||||
|
||||
### GLM-4.6:Cloud
|
||||
- **Provider**: Zhipu AI via Ollama Cloud
|
||||
- **Capabilities**: Long context, reasoning, multilingual
|
||||
- **Cost**: Free tier available
|
||||
- **Authentication**: Device key (automatic via `ollama signin`)
|
||||
Qdrant - Vector DB
|
||||
|
||||
### nomic-embed-text
|
||||
- **Local embedding model** for chunking/retrieval
|
||||
- **Dimensions**: 768
|
||||
- **Speed**: Fast, runs locally
|
||||
- **Used for**: RAG semantic search
|
||||
BGE-M3 - Embeddings
|
||||
|
||||
---
|
||||
Docling - Document processing
|
||||
|
||||
## 📈 Monitoring & Logs
|
||||
|
||||
### Check Service Health
|
||||
```bash
|
||||
# View all logs
|
||||
docker compose logs
|
||||
|
||||
# Follow live logs
|
||||
docker compose logs -f chainlit-app
|
||||
|
||||
# Check specific container
|
||||
docker inspect ai-station-chainlit-app
|
||||
```
|
||||
|
||||
### Common Issues
|
||||
| Issue | Solution |
|
||||
|-------|----------|
|
||||
| `unauthorized` error | Run `ollama signin` on server |
|
||||
| Database connection failed | Check PostgreSQL is running |
|
||||
| Qdrant unavailable | Verify `docker-compose up` completed |
|
||||
| PDF not extracted | Ensure PyMuPDF installed: `pip install pymupdf` |
|
||||
|
||||
---
|
||||
|
||||
## 🚀 Deployment
|
||||
|
||||
### Production Checklist
|
||||
- [ ] Set secure PostgreSQL credentials in `.env`
|
||||
- [ ] Enable SSL/TLS for Chainlit endpoints
|
||||
- [ ] Configure CORS for frontend
|
||||
- [ ] Setup log aggregation (ELK, Datadog, etc.)
|
||||
- [ ] Implement rate limiting
|
||||
- [ ] Add API authentication
|
||||
- [ ] Configure backup strategy for Qdrant
|
||||
|
||||
### Cloud Deployment Options
|
||||
- **AWS**: ECS + RDS + VectorDB
|
||||
- **Google Cloud**: Cloud Run + Cloud SQL
|
||||
- **DigitalOcean**: App Platform + Managed Databases
|
||||
|
||||
---
|
||||
|
||||
## 📚 API Reference
|
||||
|
||||
### REST Endpoints (via Chainlit)
|
||||
- `POST /api/chat` - Send message with context
|
||||
- `GET /api/threads` - List conversations
|
||||
- `POST /api/upload` - Upload document
|
||||
|
||||
### WebSocket
|
||||
- Real-time streaming responses via Chainlit protocol
|
||||
|
||||
---
|
||||
|
||||
## 🔮 Future Features
|
||||
|
||||
- [ ] OAuth2 Google authentication
|
||||
- [ ] Document metadata extraction (dates, amounts, entities)
|
||||
- [ ] Advanced search filters (type, date range, language)
|
||||
- [ ] Export results (PDF, CSV, JSON)
|
||||
- [ ] Analytics dashboard
|
||||
- [ ] Multi-language support
|
||||
- [ ] Document versioning
|
||||
- [ ] Compliance reporting (GDPR, audit trails)
|
||||
|
||||
---
|
||||
|
||||
## 📞 Support
|
||||
|
||||
### Troubleshooting
|
||||
1. Check logs: `docker compose logs chainlit-app`
|
||||
2. Verify Ollama authentication: `ollama show glm-4.6:cloud`
|
||||
3. Test Qdrant connection: `curl http://localhost:6333/health`
|
||||
4. Inspect PostgreSQL: `docker compose exec postgres psql -U ai_user -d ai_station`
|
||||
|
||||
### Performance Tips
|
||||
- Increase chunk overlap for better context retrieval
|
||||
- Adjust embedding model based on latency requirements
|
||||
- Monitor Qdrant memory usage for large document sets
|
||||
- Implement caching for frequent queries
|
||||
|
||||
---
|
||||
|
||||
## 📄 License
|
||||
|
||||
MIT License - See LICENSE file
|
||||
|
||||
## 👤 Author
|
||||
|
||||
AI Station Team
|
||||
|
||||
---
|
||||
|
||||
**Last Updated**: December 26, 2025
|
||||
**Version**: 1.0.0
|
||||
**Status**: Production Ready ✅
|
||||
## **Status**: 🔨 Pre-Production | **Last Update**: 2026-01-01
|
||||
580
app.py
580
app.py
|
|
@ -4,27 +4,20 @@ import uuid
|
|||
import shutil
|
||||
import requests
|
||||
import time
|
||||
import json
|
||||
from datetime import datetime
|
||||
from typing import Optional, Dict, Any, List
|
||||
|
||||
from typing import Optional, Dict, List, Any
|
||||
import chainlit as cl
|
||||
import ollama
|
||||
|
||||
from docling.document_converter import DocumentConverter
|
||||
from qdrant_client import AsyncQdrantClient
|
||||
from qdrant_client.models import (
|
||||
PointStruct,
|
||||
Distance,
|
||||
VectorParams,
|
||||
SparseVectorParams,
|
||||
Prefetch,
|
||||
)
|
||||
|
||||
# CORREZIONE IMPORT: Importiamo le classi necessarie direttamente dalla libreria
|
||||
from qdrant_client.models import PointStruct, Distance, VectorParams, SparseVectorParams, Prefetch
|
||||
from chainlit.data.sql_alchemy import SQLAlchemyDataLayer
|
||||
from chainlit.types import ThreadDict
|
||||
from functools import lru_cache
|
||||
|
||||
# === FIX IMPORT ROBUSTO Storage Client ===
|
||||
# === FIX IMPORT ROBUSTO ===
|
||||
try:
|
||||
from chainlit.data.storage_clients import BaseStorageClient
|
||||
except ImportError:
|
||||
|
|
@ -33,207 +26,68 @@ except ImportError:
|
|||
except ImportError:
|
||||
from chainlit.data.storage_clients.base import BaseStorageClient
|
||||
|
||||
|
||||
# =========================
|
||||
# CONFIG
|
||||
# =========================
|
||||
DATABASE_URL = os.getenv(
|
||||
"DATABASE_URL",
|
||||
"postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station",
|
||||
)
|
||||
# === CONFIGURAZIONE ===
|
||||
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station")
|
||||
OLLAMA_URL = os.getenv("OLLAMA_URL", "http://192.168.1.243:11434")
|
||||
QDRANT_URL = os.getenv("QDRANT_URL", "http://qdrant:6333")
|
||||
BGE_API_URL = os.getenv("BGE_API_URL", "http://192.168.1.243:8001/embed")
|
||||
|
||||
VISION_MODEL = "minicpm-v"
|
||||
|
||||
DEFAULT_TEXT_MODEL = "glm-4.6:cloud"
|
||||
MINIMAX_MODEL = "minimax-m2.1:cloud"
|
||||
|
||||
MODEL_CHOICES = [
|
||||
DEFAULT_TEXT_MODEL,
|
||||
MINIMAX_MODEL,
|
||||
"llama3.2",
|
||||
"mistral",
|
||||
"qwen2.5-coder:32b",
|
||||
]
|
||||
|
||||
WORKSPACES_DIR = "./workspaces"
|
||||
STORAGE_DIR = "./.files"
|
||||
|
||||
os.makedirs(STORAGE_DIR, exist_ok=True)
|
||||
os.makedirs(WORKSPACES_DIR, exist_ok=True)
|
||||
|
||||
# =========================
|
||||
# USER PROFILES
|
||||
# =========================
|
||||
# === MAPPING UTENTI ===
|
||||
USER_PROFILES = {
|
||||
"giuseppe@defranceschi.pro": {
|
||||
"role": "admin",
|
||||
"name": "Giuseppe",
|
||||
"workspace": "admin_workspace",
|
||||
"rag_collection": "admin_docs",
|
||||
"capabilities": ["debug", "all"],
|
||||
"show_code": True,
|
||||
},
|
||||
"federica.tecchio@gmail.com": {
|
||||
"role": "business",
|
||||
"name": "Federica",
|
||||
"workspace": "business_workspace",
|
||||
"rag_collection": "contabilita",
|
||||
"capabilities": ["basic_chat"],
|
||||
"show_code": False,
|
||||
},
|
||||
"riccardob545@gmail.com": {
|
||||
"role": "engineering",
|
||||
"name": "Riccardo",
|
||||
"workspace": "engineering_workspace",
|
||||
"rag_collection": "engineering_docs",
|
||||
"capabilities": ["code"],
|
||||
"show_code": True,
|
||||
},
|
||||
"giuliadefranceschi05@gmail.com": {
|
||||
"role": "architecture",
|
||||
"name": "Giulia",
|
||||
"workspace": "architecture_workspace",
|
||||
"rag_collection": "architecture_manuals",
|
||||
"capabilities": ["visual"],
|
||||
"show_code": False,
|
||||
},
|
||||
"giuseppe.defranceschi@gmail.com": {
|
||||
"role": "architecture",
|
||||
"name": "Giuseppe",
|
||||
"workspace": "architecture_workspace",
|
||||
"rag_collection": "architecture_manuals",
|
||||
"capabilities": ["visual"],
|
||||
"show_code": False,
|
||||
},
|
||||
"giuseppe@defranceschi.pro": { "role": "admin", "name": "Giuseppe", "workspace": "admin_workspace", "rag_collection": "admin_docs", "capabilities": ["debug", "all"], "show_code": True },
|
||||
"federica.tecchio@gmail.com": { "role": "business", "name": "Federica", "workspace": "business_workspace", "rag_collection": "contabilita", "capabilities": ["basic_chat"], "show_code": False },
|
||||
"riccardob545@gmail.com": { "role": "engineering", "name": "Riccardo", "workspace": "engineering_workspace", "rag_collection": "engineering_docs", "capabilities": ["code"], "show_code": True },
|
||||
"giuliadefranceschi05@gmail.com": { "role": "architecture", "name": "Giulia", "workspace": "architecture_workspace", "rag_collection": "architecture_manuals", "capabilities": ["visual"], "show_code": False },
|
||||
"giuseppe.defranceschi@gmail.com": { "role": "architecture", "name": "Giuseppe", "workspace": "architecture_workspace", "rag_collection": "architecture_manuals", "capabilities": ["visual"], "show_code": False }
|
||||
}
|
||||
|
||||
GUEST_PROFILE = {
|
||||
"role": "guest",
|
||||
"name": "Guest",
|
||||
"workspace": "guest",
|
||||
"rag_collection": "public",
|
||||
"capabilities": ["basic_chat"],
|
||||
"show_code": False,
|
||||
}
|
||||
|
||||
# Sensible defaults per ruolo (S-Tier: thoughtful defaults) [file:3]
|
||||
ROLE_DEFAULTS = {
|
||||
"admin": {
|
||||
"model": DEFAULT_TEXT_MODEL,
|
||||
"top_k": 6,
|
||||
"temperature": 0.3,
|
||||
"rag_enabled": True,
|
||||
"vision_detail": "high",
|
||||
},
|
||||
"engineering": {
|
||||
"model": MINIMAX_MODEL,
|
||||
"top_k": 5,
|
||||
"temperature": 0.3,
|
||||
"rag_enabled": True,
|
||||
"vision_detail": "low",
|
||||
},
|
||||
"business": {
|
||||
"model": DEFAULT_TEXT_MODEL,
|
||||
"top_k": 4,
|
||||
"temperature": 0.2,
|
||||
"rag_enabled": True,
|
||||
"vision_detail": "auto",
|
||||
},
|
||||
"architecture": {
|
||||
"model": DEFAULT_TEXT_MODEL,
|
||||
"top_k": 4,
|
||||
"temperature": 0.3,
|
||||
"rag_enabled": True,
|
||||
"vision_detail": "high",
|
||||
},
|
||||
"guest": {
|
||||
"model": DEFAULT_TEXT_MODEL,
|
||||
"top_k": 3,
|
||||
"temperature": 0.2,
|
||||
"rag_enabled": False,
|
||||
"vision_detail": "auto",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# =========================
|
||||
# STORAGE
|
||||
# =========================
|
||||
# === STORAGE CLIENT ===
|
||||
class LocalStorageClient(BaseStorageClient):
|
||||
def __init__(self, storage_path: str):
|
||||
self.storage_path = storage_path
|
||||
os.makedirs(storage_path, exist_ok=True)
|
||||
|
||||
async def upload_file(
|
||||
self,
|
||||
object_key: str,
|
||||
data: bytes,
|
||||
mime: str = "application/octet-stream",
|
||||
overwrite: bool = True,
|
||||
) -> Dict[str, str]:
|
||||
async def upload_file(self, object_key: str, data: bytes, mime: str = "application/octet-stream", overwrite: bool = True) -> Dict[str, str]:
|
||||
file_path = os.path.join(self.storage_path, object_key)
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
with open(file_path, "wb") as f:
|
||||
f.write(data)
|
||||
with open(file_path, "wb") as f: f.write(data)
|
||||
return {"object_key": object_key, "url": f"/files/{object_key}"}
|
||||
|
||||
async def get_read_url(self, object_key: str) -> str:
|
||||
return f"/files/{object_key}"
|
||||
|
||||
async def get_read_url(self, object_key: str) -> str: return f"/files/{object_key}"
|
||||
async def delete_file(self, object_key: str) -> bool:
|
||||
path = os.path.join(self.storage_path, object_key)
|
||||
if os.path.exists(path):
|
||||
os.remove(path)
|
||||
return True
|
||||
if os.path.exists(path): os.remove(path); return True
|
||||
return False
|
||||
|
||||
async def close(self):
|
||||
pass
|
||||
|
||||
async def close(self): pass
|
||||
|
||||
@cl.data_layer
|
||||
def get_data_layer():
|
||||
return SQLAlchemyDataLayer(conninfo=DATABASE_URL, storage_provider=LocalStorageClient(STORAGE_DIR))
|
||||
|
||||
|
||||
# =========================
|
||||
# OAUTH
|
||||
# =========================
|
||||
# === OAUTH & UTILS ===
|
||||
@cl.oauth_callback
|
||||
def oauth_callback(
|
||||
provider_id: str,
|
||||
token: str,
|
||||
raw_user_data: Dict[str, str],
|
||||
default_user: cl.User,
|
||||
) -> Optional[cl.User]:
|
||||
def oauth_callback(provider_id: str, token: str, raw_user_data: Dict[str, str], default_user: cl.User) -> Optional[cl.User]:
|
||||
if provider_id == "google":
|
||||
email = raw_user_data.get("email", "").lower()
|
||||
profile = USER_PROFILES.get(email, GUEST_PROFILE)
|
||||
|
||||
default_user.metadata.update(
|
||||
{
|
||||
"role": profile["role"],
|
||||
"workspace": profile["workspace"],
|
||||
"rag_collection": profile["rag_collection"],
|
||||
"show_code": profile["show_code"],
|
||||
"display_name": profile["name"],
|
||||
}
|
||||
)
|
||||
profile = USER_PROFILES.get(email, USER_PROFILES.get("guest", {"role": "guest", "name": "Guest", "workspace": "guest", "rag_collection": "public", "show_code": False}))
|
||||
default_user.metadata.update({"role": profile["role"], "workspace": profile["workspace"], "rag_collection": profile["rag_collection"], "show_code": profile["show_code"], "display_name": profile["name"]})
|
||||
return default_user
|
||||
return default_user
|
||||
|
||||
|
||||
def create_workspace(workspace_name: str) -> str:
|
||||
path = os.path.join(WORKSPACES_DIR, workspace_name)
|
||||
os.makedirs(path, exist_ok=True)
|
||||
return path
|
||||
|
||||
|
||||
# =========================
|
||||
# CORE: DOCLING
|
||||
# =========================
|
||||
# === CORE: DOCLING ===
|
||||
def process_file_with_docling(file_path: str) -> str:
|
||||
try:
|
||||
converter = DocumentConverter()
|
||||
|
|
@ -243,10 +97,7 @@ def process_file_with_docling(file_path: str) -> str:
|
|||
print(f"❌ Docling Error: {e}")
|
||||
return ""
|
||||
|
||||
|
||||
# =========================
|
||||
# CORE: BGE-M3 embeddings
|
||||
# =========================
|
||||
# === CORE: BGE-M3 CLIENT ===
|
||||
def get_bge_embeddings(text: str) -> Optional[Dict[str, Any]]:
|
||||
try:
|
||||
payload = {"texts": [text[:8000]]}
|
||||
|
|
@ -260,364 +111,353 @@ def get_bge_embeddings(text: str) -> Optional[Dict[str, Any]]:
|
|||
print(f"❌ BGE API Error: {e}")
|
||||
return None
|
||||
|
||||
|
||||
@lru_cache(maxsize=1000)
|
||||
def get_bge_embeddings_cached(text: str):
|
||||
return get_bge_embeddings(text)
|
||||
|
||||
|
||||
# =========================
|
||||
# CORE: QDRANT
|
||||
# =========================
|
||||
# === CORE: QDRANT ===
|
||||
async def ensure_collection(collection_name: str):
|
||||
client = AsyncQdrantClient(url=QDRANT_URL)
|
||||
if not await client.collection_exists(collection_name):
|
||||
await client.create_collection(
|
||||
collection_name=collection_name,
|
||||
vectors_config={"dense": VectorParams(size=1024, distance=Distance.COSINE)},
|
||||
sparse_vectors_config={"sparse": SparseVectorParams()},
|
||||
sparse_vectors_config={"sparse": SparseVectorParams()}
|
||||
)
|
||||
|
||||
|
||||
async def index_document(file_name: str, content: str, collection_name: str) -> int:
|
||||
async def index_document(file_name: str, content: str, collection_name: str):
|
||||
await ensure_collection(collection_name)
|
||||
client = AsyncQdrantClient(url=QDRANT_URL)
|
||||
|
||||
chunk_size = 2000
|
||||
overlap = 200
|
||||
points: List[PointStruct] = []
|
||||
|
||||
points = []
|
||||
for i in range(0, len(content), chunk_size - overlap):
|
||||
chunk = content[i : i + chunk_size]
|
||||
embedding_data = get_bge_embeddings(chunk)
|
||||
|
||||
if embedding_data:
|
||||
points.append(
|
||||
PointStruct(
|
||||
points.append(PointStruct(
|
||||
id=str(uuid.uuid4()),
|
||||
vector={"dense": embedding_data["dense"], "sparse": embedding_data["sparse"]},
|
||||
vector={
|
||||
"dense": embedding_data["dense"],
|
||||
"sparse": embedding_data["sparse"]
|
||||
},
|
||||
payload={
|
||||
"file_name": file_name,
|
||||
"content": chunk,
|
||||
"indexed_at": datetime.now().isoformat(),
|
||||
},
|
||||
)
|
||||
)
|
||||
"indexed_at": datetime.now().isoformat()
|
||||
}
|
||||
))
|
||||
|
||||
if points:
|
||||
await client.upsert(collection_name=collection_name, points=points)
|
||||
return len(points)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
async def search_hybrid(query: str, collection_name: str, limit: int = 4) -> str:
|
||||
client = AsyncQdrantClient(url=QDRANT_URL)
|
||||
|
||||
if not await client.collection_exists(collection_name):
|
||||
return ""
|
||||
if not await client.collection_exists(collection_name): return ""
|
||||
|
||||
query_emb = get_bge_embeddings(query)
|
||||
if not query_emb:
|
||||
return ""
|
||||
if not query_emb: return ""
|
||||
|
||||
# CORREZIONE QUI: Usiamo l'oggetto Prefetch importato correttamente
|
||||
results = await client.query_points(
|
||||
collection_name=collection_name,
|
||||
prefetch=[Prefetch(query=query_emb["sparse"], using="sparse", limit=limit * 2)],
|
||||
prefetch=[
|
||||
Prefetch(
|
||||
query=query_emb["sparse"],
|
||||
using="sparse",
|
||||
limit=limit * 2
|
||||
)
|
||||
],
|
||||
query=query_emb["dense"],
|
||||
using="dense",
|
||||
limit=limit,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
context = []
|
||||
for hit in results.points:
|
||||
context.append(f"--- DA {hit.payload['file_name']} ---\n{hit.payload['content']}")
|
||||
|
||||
return "\n\n".join(context)
|
||||
|
||||
# === Caching Embeddings ===
|
||||
@lru_cache(maxsize=1000)
|
||||
def get_bge_embeddings_cached(text: str):
|
||||
"""Cache per query ripetute"""
|
||||
return get_bge_embeddings(text)
|
||||
|
||||
# =========================
|
||||
# UX HELPERS (S-Tier: clarity, consistency)
|
||||
# =========================
|
||||
def role_to_badge_class(role: str) -> str:
|
||||
allowed = {"admin", "engineering", "business", "architecture", "guest"}
|
||||
return f"dfm-badge--{role}" if role in allowed else "dfm-badge--guest"
|
||||
|
||||
|
||||
def build_system_prompt(system_instruction: str, has_rag: bool, has_files: bool) -> str:
|
||||
base = (
|
||||
"Sei un assistente tecnico esperto.\n"
|
||||
"Obiettivo: rispondere in modo chiaro, preciso e operativo.\n"
|
||||
"- Se mancano dettagli, fai 1-2 domande mirate.\n"
|
||||
"- Se scrivi codice, includi snippet piccoli e verificabili.\n"
|
||||
)
|
||||
if has_rag:
|
||||
base += "- Usa il contesto RAG come fonte primaria quando presente.\n"
|
||||
if has_files:
|
||||
base += "- Se sono presenti file/immagini, sfrutta le informazioni estratte.\n"
|
||||
|
||||
if system_instruction.strip():
|
||||
base += "\nIstruzione aggiuntiva (utente): " + system_instruction.strip() + "\n"
|
||||
|
||||
return base
|
||||
|
||||
|
||||
def extract_code_blocks(text: str) -> List[str]:
|
||||
return re.findall(r"```(?:\w+)?\n(.*?)```", text, re.DOTALL)
|
||||
|
||||
|
||||
async def log_metrics(metrics: dict):
|
||||
# Mantieni semplice: stdout (come nella tua versione) [file:6]
|
||||
print("METRICS:", metrics)
|
||||
|
||||
|
||||
# =========================
|
||||
# CHAINLIT HANDLERS
|
||||
# =========================
|
||||
# === CHAINLIT HANDLERS ===
|
||||
@cl.on_chat_start
|
||||
async def start():
|
||||
# 1) Profilo utente
|
||||
# 1. Profilo utente
|
||||
user = cl.user_session.get("user")
|
||||
email = user.identifier if user else "guest"
|
||||
profile = USER_PROFILES.get(email, USER_PROFILES["giuseppe@defranceschi.pro"])
|
||||
|
||||
profile = USER_PROFILES.get(email, GUEST_PROFILE)
|
||||
cl.user_session.set("profile", profile)
|
||||
|
||||
create_workspace(profile["workspace"])
|
||||
|
||||
role = profile.get("role", "guest")
|
||||
defaults = ROLE_DEFAULTS.get(role, ROLE_DEFAULTS["guest"])
|
||||
cl.user_session.set("role_defaults", defaults)
|
||||
# 2. Badge HTML personalizzato
|
||||
role_color = {
|
||||
"admin": "#e74c3c",
|
||||
"engineering": "#3498db",
|
||||
"business": "#2ecc71",
|
||||
"architecture": "#9b59b6",
|
||||
}.get(profile["role"], "#95a5a6")
|
||||
|
||||
# 2) Badge (HTML controllato; stile via CSS)
|
||||
badge_html = f"""
|
||||
<div class="dfm-badge {role_to_badge_class(role)}">
|
||||
<span><b>{profile['name']}</b></span>
|
||||
<span style="opacity:.8">{role.upper()}</span>
|
||||
<span style="opacity:.7">· {profile['workspace']}</span>
|
||||
<div style="background:{role_color}; padding:8px; border-radius:8px; margin-bottom:16px;">
|
||||
👤 <b>{profile['name']}</b> | 🔧 {profile['role'].upper()} | 📁 {profile['workspace']}
|
||||
</div>
|
||||
"""
|
||||
await cl.Message(content=badge_html).send()
|
||||
|
||||
# 3) Settings UI (Clarity + sensible defaults)
|
||||
# 3. Settings UI
|
||||
settings = await cl.ChatSettings(
|
||||
[
|
||||
cl.input_widget.Switch(
|
||||
id="rag_enabled",
|
||||
label="📚 Usa Conoscenza Documenti",
|
||||
initial=bool(defaults["rag_enabled"]),
|
||||
description="Attiva la ricerca nei documenti caricati (consigliato).",
|
||||
),
|
||||
cl.input_widget.Slider(
|
||||
id="top_k",
|
||||
label="Profondità Ricerca (documenti)",
|
||||
initial=int(defaults["top_k"]),
|
||||
label="Numero Documenti RAG",
|
||||
initial=4,
|
||||
min=1,
|
||||
max=10,
|
||||
step=1,
|
||||
description="Più documenti = risposta più completa ma più lenta.",
|
||||
),
|
||||
cl.input_widget.Select(
|
||||
id="model",
|
||||
label="🤖 Modello AI",
|
||||
values=MODEL_CHOICES,
|
||||
initial_value=str(defaults["model"]),
|
||||
),
|
||||
cl.input_widget.Slider(
|
||||
id="temperature",
|
||||
label="Creatività",
|
||||
initial=float(defaults["temperature"]),
|
||||
min=0,
|
||||
max=1,
|
||||
step=0.1,
|
||||
description="Bassa = più precisione (consigliato per codice).",
|
||||
),
|
||||
cl.input_widget.Select(
|
||||
id="vision_detail",
|
||||
label="🔍 Dettaglio Analisi Immagini",
|
||||
label="Dettaglio Analisi Immagini",
|
||||
values=["auto", "low", "high"],
|
||||
initial_value=str(defaults["vision_detail"]),
|
||||
initial_value="auto",
|
||||
),
|
||||
cl.input_widget.TextInput(
|
||||
id="system_instruction",
|
||||
label="✏️ Istruzione Sistema (opzionale)",
|
||||
label="Istruzione Sistema Custom (opzionale)",
|
||||
initial="",
|
||||
placeholder="es: Rispondi con bullet points e includi esempi",
|
||||
description="Personalizza stile/format delle risposte.",
|
||||
placeholder="Es: Rispondi sempre in formato tecnico...",
|
||||
),
|
||||
cl.input_widget.Select(
|
||||
id="model",
|
||||
label="Modello di Ragionamento",
|
||||
values=[DEFAULT_TEXT_MODEL, "llama3.2", "mistral", "qwen2.5-coder:32b"],
|
||||
initial_value=DEFAULT_TEXT_MODEL,
|
||||
),
|
||||
cl.input_widget.Slider(
|
||||
id="temperature",
|
||||
label="Creatività (Temperatura)",
|
||||
initial=0.3,
|
||||
min=0,
|
||||
max=1,
|
||||
step=0.1,
|
||||
),
|
||||
cl.input_widget.Switch(
|
||||
id="rag_enabled",
|
||||
label="Usa Conoscenza Documenti (RAG)",
|
||||
initial=True,
|
||||
),
|
||||
]
|
||||
).send()
|
||||
|
||||
cl.user_session.set("settings", settings)
|
||||
|
||||
# 4. Messaggio iniziale (opzionale)
|
||||
await cl.Message(
|
||||
content=(
|
||||
"✅ Ai Station online.\n"
|
||||
f"• Workspace: `{profile['workspace']}`\n"
|
||||
f"• Default modello: `{defaults['model']}`\n"
|
||||
f"• Vision: `{VISION_MODEL}`"
|
||||
f"🚀 **Vision-RAG Hybrid System Online**\n"
|
||||
f"Utente: {profile['name']} | Workspace: {profile['workspace']}\n"
|
||||
f"Engine: Docling + BGE-M3 + {VISION_MODEL}"
|
||||
)
|
||||
).send()
|
||||
|
||||
|
||||
@cl.on_settings_update
|
||||
async def setupagentsettings(settings):
|
||||
cl.user_session.set("settings", settings)
|
||||
|
||||
await cl.Message(
|
||||
content=(
|
||||
"✅ Impostazioni aggiornate:\n"
|
||||
f"• Modello: `{settings.get('model')}`\n"
|
||||
f"• RAG: {'ON' if settings.get('rag_enabled') else 'OFF'} · top_k={settings.get('top_k')}\n"
|
||||
f"• Creatività: {settings.get('temperature')}\n"
|
||||
f"• Vision detail: `{settings.get('vision_detail')}`"
|
||||
)
|
||||
).send()
|
||||
await cl.Message(f"🚀 **Vision-RAG Hybrid System Online**\nUtente: {profile['name']} | Workspace: {profile['workspace']}\nEngine: Docling + BGE-M3 + {VISION_MODEL}").send()
|
||||
|
||||
@cl.on_settings_update
|
||||
async def setup_agent(settings):
|
||||
cl.user_session.set("settings", settings)
|
||||
await cl.Message(content=f"✅ Impostazioni aggiornate: Modello {settings['model']}").send()
|
||||
|
||||
async def log_metrics(metrics: dict):
|
||||
# Versione minima: log su stdout
|
||||
print("[METRICS]", metrics)
|
||||
|
||||
# In futuro puoi:
|
||||
# - salvarle in Postgres
|
||||
# - mandarle a Prometheus / Grafana
|
||||
# - scriverle su file JSON per analisi settimanale
|
||||
|
||||
# - Resume Chat Handler
|
||||
|
||||
@cl.on_chat_resume
|
||||
async def on_chat_resume(thread: ThreadDict):
|
||||
"""
|
||||
Viene chiamato quando l'utente clicca 'Riprendi' su una chat archiviata.
|
||||
Chainlit carica già i messaggi nella UI, qui puoi solo ripristinare la sessione.
|
||||
"""
|
||||
# Se vuoi, puoi recuperare l'identifier dell’utente dal thread
|
||||
user_identifier = thread.get("userIdentifier")
|
||||
profile = USER_PROFILES.get(user_identifier, GUEST_PROFILE)
|
||||
profile = USER_PROFILES.get(
|
||||
user_identifier,
|
||||
USER_PROFILES["giuseppe@defranceschi.pro"],
|
||||
)
|
||||
cl.user_session.set("profile", profile)
|
||||
create_workspace(profile["workspace"])
|
||||
await cl.Message(content="Bentornato! Riprendiamo da qui.").send()
|
||||
|
||||
# Puoi anche ripristinare eventuale stato custom (es: impostazioni di default)
|
||||
# oppure semplicemente salutare l’utente
|
||||
await cl.Message(
|
||||
content="👋 Bentornato! Possiamo riprendere da questa conversazione."
|
||||
).send()
|
||||
|
||||
@cl.on_message
|
||||
async def main(message: cl.Message):
|
||||
start_time = time.time()
|
||||
|
||||
profile = cl.user_session.get("profile", GUEST_PROFILE)
|
||||
profile = cl.user_session.get("profile")
|
||||
settings = cl.user_session.get("settings", {})
|
||||
|
||||
selected_model = settings.get("model", DEFAULT_TEXT_MODEL)
|
||||
temperature = float(settings.get("temperature", 0.3))
|
||||
rag_enabled = bool(settings.get("rag_enabled", True))
|
||||
top_k = int(settings.get("top_k", 4))
|
||||
vision_detail = settings.get("vision_detail", "auto")
|
||||
system_instruction = (settings.get("system_instruction", "") or "").strip()
|
||||
temperature = settings.get("temperature", 0.3)
|
||||
rag_enabled = settings.get("rag_enabled", True)
|
||||
|
||||
workspace = create_workspace(profile["workspace"])
|
||||
|
||||
# 1) Gestione upload (immagini / pdf / docx)
|
||||
images_for_vision: List[str] = []
|
||||
images_for_vision = []
|
||||
doc_context = ""
|
||||
rag_context = "" # ← la inizializzi qui, così esiste sempre
|
||||
|
||||
# 1. GESTIONE FILE
|
||||
if message.elements:
|
||||
for element in message.elements:
|
||||
file_path = os.path.join(workspace, element.name)
|
||||
shutil.copy(element.path, file_path)
|
||||
|
||||
# Immagini
|
||||
if "image" in (element.mime or ""):
|
||||
if "image" in element.mime:
|
||||
images_for_vision.append(file_path)
|
||||
msg_img = cl.Message(content=f"🖼️ Analizzo immagine `{element.name}` con `{VISION_MODEL}`...")
|
||||
msg_img = cl.Message(
|
||||
content=f"👁️ Analizzo immagine **{element.name}** con {VISION_MODEL}..."
|
||||
)
|
||||
await msg_img.send()
|
||||
|
||||
try:
|
||||
with open(file_path, "rb") as imgfile:
|
||||
imgbytes = imgfile.read()
|
||||
with open(file_path, "rb") as img_file:
|
||||
img_bytes = img_file.read()
|
||||
|
||||
client_sync = ollama.Client(host=OLLAMA_URL)
|
||||
res = client_sync.chat(
|
||||
model=VISION_MODEL,
|
||||
messages=[
|
||||
{
|
||||
messages=[{
|
||||
"role": "user",
|
||||
"content": (
|
||||
"Analizza questa immagine tecnica. "
|
||||
"Trascrivi testi/codici e descrivi diagrammi o tabelle in dettaglio. "
|
||||
f"Dettaglio richiesto: {vision_detail}."
|
||||
"Analizza questa immagine tecnica. Trascrivi testi, codici "
|
||||
"e descrivi diagrammi o tabelle in dettaglio."
|
||||
),
|
||||
"images": [imgbytes],
|
||||
}
|
||||
],
|
||||
"images": [img_bytes],
|
||||
}],
|
||||
)
|
||||
desc = res.get("message", {}).get("content", "")
|
||||
doc_context += f"\n\n## DESCRIZIONE IMMAGINE: {element.name}\n{desc}\n"
|
||||
msg_img.content = f"✅ Immagine analizzata: {desc[:300]}..."
|
||||
await msg_img.update()
|
||||
except Exception as e:
|
||||
msg_img.content = f"❌ Errore analisi immagine: {e}"
|
||||
desc = res["message"]["content"]
|
||||
doc_context += f"\n\n[DESCRIZIONE IMMAGINE {element.name}]:\n{desc}"
|
||||
msg_img.content = f"✅ Immagine analizzata:\n{desc[:200]}..."
|
||||
await msg_img.update()
|
||||
|
||||
# Documenti (pdf/docx)
|
||||
elif element.name.lower().endswith((".pdf", ".docx")):
|
||||
msg_doc = cl.Message(content=f"📄 Leggo `{element.name}` con Docling (tabelle/formule)...")
|
||||
elif element.name.endswith((".pdf", ".docx")):
|
||||
msg_doc = cl.Message(
|
||||
content=f"📄 Leggo **{element.name}** con Docling (tabelle/formule)..."
|
||||
)
|
||||
await msg_doc.send()
|
||||
|
||||
markdown_content = process_file_with_docling(file_path)
|
||||
if markdown_content:
|
||||
chunks = await index_document(element.name, markdown_content, profile["rag_collection"])
|
||||
doc_context += f"\n\n## CONTENUTO FILE: {element.name}\n{markdown_content[:2000]}\n"
|
||||
msg_doc.content = f"✅ `{element.name}` convertito e indicizzato ({chunks} chunks)."
|
||||
chunks = await index_document(
|
||||
element.name, markdown_content, profile["rag_collection"]
|
||||
)
|
||||
msg_doc.content = (
|
||||
f"✅ **{element.name}**: Convertito e salvato {chunks} "
|
||||
"frammenti nel DB vettoriale."
|
||||
)
|
||||
doc_context += (
|
||||
f"\n\n[CONTENUTO FILE {element.name}]:\n"
|
||||
f"{markdown_content[:1000]}..."
|
||||
)
|
||||
else:
|
||||
msg_doc.content = f"❌ Errore lettura `{element.name}`."
|
||||
msg_doc.content = f"❌ Errore lettura {element.name}"
|
||||
await msg_doc.update()
|
||||
|
||||
# 2) RAG retrieval (solo se attivo e senza immagini-only flow)
|
||||
rag_context = ""
|
||||
# 2. RAG RETRIEVAL
|
||||
if rag_enabled and not images_for_vision:
|
||||
rag_context = await search_hybrid(message.content, profile["rag_collection"], limit=top_k)
|
||||
|
||||
has_rag = bool(rag_context.strip())
|
||||
has_files = bool(doc_context.strip())
|
||||
|
||||
# 3) Prompt building
|
||||
system_prompt = build_system_prompt(system_instruction, has_rag=has_rag, has_files=has_files)
|
||||
rag_context = await search_hybrid(
|
||||
message.content, profile["rag_collection"]
|
||||
)
|
||||
|
||||
final_context = ""
|
||||
if has_rag:
|
||||
final_context += "\n\n# CONTESTO RAG\n" + rag_context
|
||||
if has_files:
|
||||
final_context += "\n\n# CONTESTO FILE SESSIONE\n" + doc_context
|
||||
if rag_context:
|
||||
final_context += f"CONTESTO RAG:\n{rag_context}\n"
|
||||
if doc_context:
|
||||
final_context += f"CONTESTO SESSIONE CORRENTE:\n{doc_context}\n"
|
||||
|
||||
system_prompt = (
|
||||
"Sei un assistente tecnico esperto. Usa il contesto fornito "
|
||||
"(incluso Markdown di tabelle e descrizioni immagini) per "
|
||||
"rispondere con precisione. Cita i documenti fonte."
|
||||
)
|
||||
|
||||
# 4) Generazione (stream)
|
||||
msg = cl.Message(content="")
|
||||
await msg.send()
|
||||
|
||||
error: Optional[str] = None
|
||||
error = None
|
||||
|
||||
# 3. GENERAZIONE
|
||||
try:
|
||||
client_async = ollama.AsyncClient(host=OLLAMA_URL)
|
||||
stream = await client_async.chat(
|
||||
model=selected_model,
|
||||
messages=[
|
||||
{"role": "system", "content": system_prompt},
|
||||
{"role": "user", "content": f"Domanda: {message.content}\n{final_context}"},
|
||||
{
|
||||
"role": "user",
|
||||
"content": f"Domanda: {message.content}\n\n{final_context}",
|
||||
},
|
||||
],
|
||||
options={"temperature": temperature},
|
||||
stream=True,
|
||||
)
|
||||
async for chunk in stream:
|
||||
content = chunk.get("message", {}).get("content", "")
|
||||
if content:
|
||||
await msg.stream_token(content)
|
||||
|
||||
async for chunk in stream:
|
||||
content = chunk["message"]["content"]
|
||||
await msg.stream_token(content)
|
||||
await msg.update()
|
||||
except Exception as e:
|
||||
error = str(e)
|
||||
await msg.stream_token(f"\n\n❌ Errore AI: {error}")
|
||||
await msg.stream_token(f"❌ Errore AI: {error}")
|
||||
await msg.update()
|
||||
|
||||
# 5) Salvataggio code blocks (solo per profili con show_code)
|
||||
if profile.get("show_code", False) and msg.content:
|
||||
codeblocks = extract_code_blocks(msg.content)
|
||||
if codeblocks:
|
||||
for i, code in enumerate(codeblocks):
|
||||
# 4. SALVATAGGIO CODICE
|
||||
if profile["show_code"]:
|
||||
code_blocks = re.findall(r"``````", msg.content, re.DOTALL)
|
||||
if code_blocks:
|
||||
for i, code in enumerate(code_blocks):
|
||||
fname = f"script_{datetime.now().strftime('%H%M%S')}_{i}.py"
|
||||
try:
|
||||
with open(os.path.join(workspace, fname), "w", encoding="utf-8") as f:
|
||||
with open(os.path.join(workspace, fname), "w") as f:
|
||||
f.write(code.strip())
|
||||
await cl.Message(content=f"💾 Script salvato: `{fname}`").send()
|
||||
except Exception as e:
|
||||
await cl.Message(content=f"❌ Errore salvataggio `{fname}`: {e}").send()
|
||||
await cl.Message(
|
||||
content=f"💾 Script salvato: `{fname}`"
|
||||
).send()
|
||||
|
||||
# 6) Metriche
|
||||
# 5. METRICHE (ALLA FINE)
|
||||
elapsed = time.time() - start_time
|
||||
|
||||
# Se rag_context è una stringa concatenata, puoi stimare i "rag_hits"
|
||||
# contando i separatori che usi in search_hybrid (es. '--- DA ')
|
||||
if rag_context:
|
||||
rag_hits = rag_context.count("--- DA ")
|
||||
else:
|
||||
rag_hits = 0
|
||||
|
||||
metrics = {
|
||||
"response_time": elapsed,
|
||||
"rag_hits": rag_context.count("--- DA ") if rag_context else 0,
|
||||
"rag_hits": rag_hits,
|
||||
"model": selected_model,
|
||||
"user_role": profile.get("role", "unknown"),
|
||||
"user_role": profile["role"],
|
||||
"error": error,
|
||||
}
|
||||
|
||||
await log_metrics(metrics)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,344 @@
|
|||
import os
|
||||
import re
|
||||
import uuid
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
from typing import Optional, Dict, List
|
||||
import chainlit as cl
|
||||
import ollama
|
||||
import fitz # PyMuPDF
|
||||
from qdrant_client import AsyncQdrantClient
|
||||
from qdrant_client.models import PointStruct, Distance, VectorParams
|
||||
from chainlit.data.sql_alchemy import SQLAlchemyDataLayer
|
||||
|
||||
# === FIX IMPORT ROBUSTO ===
|
||||
# Gestisce le differenze tra le versioni di Chainlit 2.x
|
||||
try:
|
||||
from chainlit.data.storage_clients import BaseStorageClient
|
||||
except ImportError:
|
||||
try:
|
||||
from chainlit.data.base import BaseStorageClient
|
||||
except ImportError:
|
||||
from chainlit.data.storage_clients.base import BaseStorageClient
|
||||
|
||||
# === CONFIGURAZIONE ===
|
||||
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station")
|
||||
OLLAMA_URL = os.getenv("OLLAMA_URL", "http://192.168.1.243:11434")
|
||||
QDRANT_URL = os.getenv("QDRANT_URL", "http://qdrant:6333")
|
||||
WORKSPACES_DIR = "./workspaces"
|
||||
STORAGE_DIR = "./.files"
|
||||
|
||||
os.makedirs(STORAGE_DIR, exist_ok=True)
|
||||
os.makedirs(WORKSPACES_DIR, exist_ok=True)
|
||||
|
||||
# === MAPPING UTENTI E RUOLI ===
|
||||
USER_PROFILES = {
|
||||
"giuseppe@defranceschi.pro": {
|
||||
"role": "admin",
|
||||
"name": "Giuseppe",
|
||||
"workspace": "admin_workspace",
|
||||
"rag_collection": "admin_docs",
|
||||
"capabilities": ["debug", "system_prompts", "user_management", "all_models"],
|
||||
"show_code": True
|
||||
},
|
||||
"federica.tecchio@gmail.com": {
|
||||
"role": "business",
|
||||
"name": "Federica",
|
||||
"workspace": "business_workspace",
|
||||
"rag_collection": "contabilita",
|
||||
"capabilities": ["pdf_upload", "basic_chat"],
|
||||
"show_code": False
|
||||
},
|
||||
"giuseppe.defranceschi@gmail.com": {
|
||||
"role": "admin",
|
||||
"name": "Giuseppe",
|
||||
"workspace": "admin_workspace",
|
||||
"rag_collection": "admin_docs",
|
||||
"capabilities": ["debug", "system_prompts", "user_management", "all_models"],
|
||||
"show_code": True
|
||||
},
|
||||
"riccardob545@gmail.com": {
|
||||
"role": "engineering",
|
||||
"name": "Riccardo",
|
||||
"workspace": "engineering_workspace",
|
||||
"rag_collection": "engineering_docs",
|
||||
"capabilities": ["code_execution", "data_viz", "advanced_chat"],
|
||||
"show_code": True
|
||||
},
|
||||
"giuliadefranceschi05@gmail.com": {
|
||||
"role": "architecture",
|
||||
"name": "Giulia",
|
||||
"workspace": "architecture_workspace",
|
||||
"rag_collection": "architecture_manuals",
|
||||
"capabilities": ["visual_chat", "pdf_upload", "image_gen"],
|
||||
"show_code": False
|
||||
}
|
||||
}
|
||||
|
||||
# === CUSTOM LOCAL STORAGE CLIENT (FIXED) ===# Questa classe ora implementa tutti i metodi astratti richiesti da Chainlit 2.8.3
|
||||
class LocalStorageClient(BaseStorageClient):
|
||||
"""Storage locale su filesystem per file/elementi"""
|
||||
|
||||
def __init__(self, storage_path: str):
|
||||
self.storage_path = storage_path
|
||||
os.makedirs(storage_path, exist_ok=True)
|
||||
|
||||
async def upload_file(
|
||||
self,
|
||||
object_key: str,
|
||||
data: bytes,
|
||||
mime: str = "application/octet-stream",
|
||||
overwrite: bool = True,
|
||||
) -> Dict[str, str]:
|
||||
file_path = os.path.join(self.storage_path, object_key)
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
with open(file_path, "wb") as f:
|
||||
f.write(data)
|
||||
return {"object_key": object_key, "url": f"/files/{object_key}"}
|
||||
|
||||
# Implementazione metodi obbligatori mancanti nella versione precedente
|
||||
async def get_read_url(self, object_key: str) -> str:
|
||||
return f"/files/{object_key}"
|
||||
|
||||
async def delete_file(self, object_key: str) -> bool:
|
||||
file_path = os.path.join(self.storage_path, object_key)
|
||||
if os.path.exists(file_path):
|
||||
os.remove(file_path)
|
||||
return True
|
||||
return False
|
||||
|
||||
async def close(self):
|
||||
pass
|
||||
|
||||
# === DATA LAYER ===
|
||||
@cl.data_layer
|
||||
def get_data_layer():
|
||||
return SQLAlchemyDataLayer(
|
||||
conninfo=DATABASE_URL,
|
||||
user_thread_limit=1000,
|
||||
storage_provider=LocalStorageClient(storage_path=STORAGE_DIR)
|
||||
)
|
||||
|
||||
# === OAUTH CALLBACK ===
|
||||
@cl.oauth_callback
|
||||
def oauth_callback(
|
||||
provider_id: str,
|
||||
token: str,
|
||||
raw_user_data: Dict[str, str],
|
||||
default_user: cl.User,
|
||||
) -> Optional[cl.User]:
|
||||
if provider_id == "google":
|
||||
email = raw_user_data.get("email", "").lower()
|
||||
|
||||
# Verifica se utente è autorizzato (opzionale: blocca se non in lista)
|
||||
# if email not in USER_PROFILES:
|
||||
# return None
|
||||
|
||||
# Recupera profilo o usa default Guest
|
||||
profile = USER_PROFILES.get(email, get_user_profile("guest"))
|
||||
|
||||
default_user.metadata.update({
|
||||
"picture": raw_user_data.get("picture", ""),
|
||||
"role": profile["role"],
|
||||
"workspace": profile["workspace"],
|
||||
"rag_collection": profile["rag_collection"],
|
||||
"capabilities": profile["capabilities"],
|
||||
"show_code": profile["show_code"],
|
||||
"display_name": profile["name"]
|
||||
})
|
||||
return default_user
|
||||
return default_user
|
||||
|
||||
# === UTILITY FUNCTIONS ===
|
||||
def get_user_profile(user_email: str) -> Dict:
|
||||
return USER_PROFILES.get(user_email.lower(), {
|
||||
"role": "guest",
|
||||
"name": "Ospite",
|
||||
"workspace": "guest_workspace",
|
||||
"rag_collection": "documents",
|
||||
"capabilities": [],
|
||||
"show_code": False
|
||||
})
|
||||
|
||||
def create_workspace(workspace_name: str) -> str:
|
||||
path = os.path.join(WORKSPACES_DIR, workspace_name)
|
||||
os.makedirs(path, exist_ok=True)
|
||||
return path
|
||||
|
||||
def save_code_to_file(code: str, workspace: str) -> str:
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
file_name = f"code_{timestamp}.py"
|
||||
file_path = os.path.join(WORKSPACES_DIR, workspace, file_name)
|
||||
with open(file_path, "w", encoding="utf-8") as f:
|
||||
f.write(code)
|
||||
return file_path
|
||||
|
||||
def extract_text_from_pdf(pdf_path: str) -> str:
|
||||
try:
|
||||
doc = fitz.open(pdf_path)
|
||||
text = "\n".join([page.get_text() for page in doc])
|
||||
doc.close()
|
||||
return text
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
# === QDRANT FUNCTIONS ===
|
||||
async def get_qdrant_client() -> AsyncQdrantClient:
|
||||
return AsyncQdrantClient(url=QDRANT_URL)
|
||||
|
||||
async def ensure_collection(collection_name: str):
|
||||
client = await get_qdrant_client()
|
||||
if not await client.collection_exists(collection_name):
|
||||
await client.create_collection(
|
||||
collection_name=collection_name,
|
||||
vectors_config=VectorParams(size=768, distance=Distance.COSINE)
|
||||
)
|
||||
|
||||
async def get_embeddings(text: str) -> list:
|
||||
client = ollama.Client(host=OLLAMA_URL)
|
||||
try:
|
||||
response = client.embed(model='nomic-embed-text', input=text[:2000])
|
||||
if 'embeddings' in response: return response['embeddings'][0]
|
||||
return response.get('embedding', [])
|
||||
except: return []
|
||||
|
||||
async def index_document(file_name: str, content: str, collection_name: str) -> bool:
|
||||
try:
|
||||
await ensure_collection(collection_name)
|
||||
embedding = await get_embeddings(content)
|
||||
if not embedding: return False
|
||||
|
||||
qdrant = await get_qdrant_client()
|
||||
await qdrant.upsert(
|
||||
collection_name=collection_name,
|
||||
points=[PointStruct(
|
||||
id=str(uuid.uuid4()),
|
||||
vector=embedding,
|
||||
payload={"file_name": file_name, "content": content[:3000], "indexed_at": datetime.now().isoformat()}
|
||||
)]
|
||||
)
|
||||
return True
|
||||
except: return False
|
||||
|
||||
async def search_qdrant(query: str, collection: str) -> str:
|
||||
try:
|
||||
client = await get_qdrant_client()
|
||||
if not await client.collection_exists(collection): return ""
|
||||
emb = await get_embeddings(query)
|
||||
if not emb: return ""
|
||||
res = await client.query_points(collection_name=collection, query=emb, limit=3)
|
||||
return "\n\n".join([hit.payload['content'] for hit in res.points if hit.payload])
|
||||
except: return ""
|
||||
|
||||
# === CHAINLIT HANDLERS ===
|
||||
|
||||
@cl.on_chat_start
|
||||
async def on_chat_start():
|
||||
user = cl.user_session.get("user")
|
||||
|
||||
if not user:
|
||||
# Fallback locale se non c'è auth
|
||||
user_email = "guest@local"
|
||||
profile = get_user_profile(user_email)
|
||||
else:
|
||||
user_email = user.identifier
|
||||
# I metadati sono già popolati dalla callback oauth
|
||||
profile = USER_PROFILES.get(user_email, get_user_profile("guest"))
|
||||
|
||||
# Salva in sessione
|
||||
cl.user_session.set("email", user_email)
|
||||
cl.user_session.set("role", profile["role"])
|
||||
cl.user_session.set("workspace", profile["workspace"])
|
||||
cl.user_session.set("rag_collection", profile["rag_collection"])
|
||||
cl.user_session.set("show_code", profile["show_code"])
|
||||
|
||||
create_workspace(profile["workspace"])
|
||||
|
||||
# === SETTINGS WIDGETS ===
|
||||
settings_widgets = [
|
||||
cl.input_widget.Select(
|
||||
id="model",
|
||||
label="Modello AI",
|
||||
values=["glm-4.6:cloud", "llama3.2", "mistral", "qwen2.5-coder:32b"],
|
||||
initial_value="glm-4.6:cloud",
|
||||
),
|
||||
cl.input_widget.Slider(
|
||||
id="temperature",
|
||||
label="Temperatura",
|
||||
initial=0.7, min=0, max=2, step=0.1,
|
||||
),
|
||||
]
|
||||
if profile["role"] == "admin":
|
||||
settings_widgets.append(cl.input_widget.Switch(id="rag_enabled", label="Abilita RAG", initial=True))
|
||||
|
||||
await cl.ChatSettings(settings_widgets).send()
|
||||
|
||||
await cl.Message(
|
||||
content=f"👋 Ciao **{profile['name']}**!\n"
|
||||
f"Ruolo: `{profile['role']}` | Workspace: `{profile['workspace']}`\n"
|
||||
).send()
|
||||
|
||||
@cl.on_settings_update
|
||||
async def on_settings_update(settings):
|
||||
cl.user_session.set("settings", settings)
|
||||
await cl.Message(content="✅ Impostazioni aggiornate").send()
|
||||
|
||||
@cl.on_message
|
||||
async def on_message(message: cl.Message):
|
||||
workspace = cl.user_session.get("workspace")
|
||||
rag_collection = cl.user_session.get("rag_collection")
|
||||
user_role = cl.user_session.get("role")
|
||||
show_code = cl.user_session.get("show_code")
|
||||
|
||||
settings = cl.user_session.get("settings", {})
|
||||
model = settings.get("model", "glm-4.6:cloud")
|
||||
temperature = settings.get("temperature", 0.7)
|
||||
rag_enabled = settings.get("rag_enabled", True) if user_role == "admin" else True
|
||||
|
||||
# 1. GESTIONE FILE
|
||||
if message.elements:
|
||||
for element in message.elements:
|
||||
dest = os.path.join(WORKSPACES_DIR, workspace, element.name)
|
||||
shutil.copy(element.path, dest)
|
||||
if element.name.endswith(".pdf"):
|
||||
text = extract_text_from_pdf(dest)
|
||||
if text:
|
||||
await index_document(element.name, text, rag_collection)
|
||||
await cl.Message(content=f"✅ **{element.name}** indicizzato.").send()
|
||||
|
||||
# 2. RAG
|
||||
context = ""
|
||||
if rag_enabled:
|
||||
context = await search_qdrant(message.content, rag_collection)
|
||||
|
||||
system_prompt = "Sei un assistente esperto."
|
||||
if context: system_prompt += f"\n\nCONTESTO:\n{context}"
|
||||
|
||||
# 3. GENERAZIONE
|
||||
client = ollama.AsyncClient(host=OLLAMA_URL)
|
||||
msg = cl.Message(content="")
|
||||
await msg.send()
|
||||
|
||||
stream = await client.chat(
|
||||
model=model,
|
||||
messages=[{"role": "system", "content": system_prompt}, {"role": "user", "content": message.content}],
|
||||
options={"temperature": temperature},
|
||||
stream=True
|
||||
)
|
||||
|
||||
full_resp = ""
|
||||
async for chunk in stream:
|
||||
token = chunk['message']['content']
|
||||
full_resp += token
|
||||
await msg.stream_token(token)
|
||||
await msg.update()
|
||||
|
||||
# 4. SALVATAGGIO CODICE
|
||||
if show_code:
|
||||
blocks = re.findall(r"``````", full_resp, re.DOTALL)
|
||||
elements = []
|
||||
for code in blocks:
|
||||
path = save_code_to_file(code.strip(), workspace)
|
||||
elements.append(cl.File(name=os.path.basename(path), path=path, display="inline"))
|
||||
if elements:
|
||||
await cl.Message(content="💾 Codice salvato", elements=elements).send()
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
Architecture: x86_64
|
||||
CPU op-mode(s): 32-bit, 64-bit
|
||||
Address sizes: 40 bits physical, 48 bits virtual
|
||||
Byte Order: Little Endian
|
||||
CPU(s): 16
|
||||
On-line CPU(s) list: 0-15
|
||||
Vendor ID: GenuineIntel
|
||||
Model name: QEMU Virtual CPU version 2.5+
|
||||
CPU family: 15
|
||||
Model: 107
|
||||
Thread(s) per core: 1
|
||||
Core(s) per socket: 4
|
||||
Socket(s): 4
|
||||
Stepping: 1
|
||||
BogoMIPS: 4999.99
|
||||
Flags: fpu de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx lm constant_tsc nopl xtopology cpuid tsc_known_freq pni ssse3 cx16 sse4_1 sse4_2 x2apic popcnt aes hypervisor lahf_lm cpuid_fault pti
|
||||
Hypervisor vendor: KVM
|
||||
Virtualization type: full
|
||||
L1d cache: 512 KiB (16 instances)
|
||||
L1i cache: 512 KiB (16 instances)
|
||||
L2 cache: 64 MiB (16 instances)
|
||||
L3 cache: 64 MiB (4 instances)
|
||||
NUMA node(s): 1
|
||||
NUMA node0 CPU(s): 0-15
|
||||
Vulnerability Gather data sampling: Not affected
|
||||
Vulnerability Itlb multihit: KVM: Mitigation: VMX unsupported
|
||||
Vulnerability L1tf: Mitigation; PTE Inversion
|
||||
Vulnerability Mds: Vulnerable: Clear CPU buffers attempted, no microcode; SMT Host state unknown
|
||||
Vulnerability Meltdown: Mitigation; PTI
|
||||
Vulnerability Mmio stale data: Unknown: No mitigations
|
||||
Vulnerability Reg file data sampling: Not affected
|
||||
Vulnerability Retbleed: Not affected
|
||||
Vulnerability Spec rstack overflow: Not affected
|
||||
Vulnerability Spec store bypass: Vulnerable
|
||||
Vulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization
|
||||
Vulnerability Spectre v2: Mitigation; Retpolines; STIBP disabled; RSB filling; PBRSB-eIBRS Not affected; BHI Retpoline
|
||||
Vulnerability Srbds: Not affected
|
||||
Vulnerability Tsx async abort: Not affected
|
||||
Vulnerability Vmscape: Not affected
|
||||
|
|
@ -0,0 +1,129 @@
|
|||
# S-Tier SaaS Dashboard Design Checklist (Inspired by Stripe, Airbnb, Linear)
|
||||
|
||||
## I. Core Design Philosophy & Strategy
|
||||
|
||||
* [ ] **Users First:** Prioritize user needs, workflows, and ease of use in every design decision.
|
||||
* [ ] **Meticulous Craft:** Aim for precision, polish, and high quality in every UI element and interaction.
|
||||
* [ ] **Speed & Performance:** Design for fast load times and snappy, responsive interactions.
|
||||
* [ ] **Simplicity & Clarity:** Strive for a clean, uncluttered interface. Ensure labels, instructions, and information are unambiguous.
|
||||
* [ ] **Focus & Efficiency:** Help users achieve their goals quickly and with minimal friction. Minimize unnecessary steps or distractions.
|
||||
* [ ] **Consistency:** Maintain a uniform design language (colors, typography, components, patterns) across the entire dashboard.
|
||||
* [ ] **Accessibility (WCAG AA+):** Design for inclusivity. Ensure sufficient color contrast, keyboard navigability, and screen reader compatibility.
|
||||
* [ ] **Opinionated Design (Thoughtful Defaults):** Establish clear, efficient default workflows and settings, reducing decision fatigue for users.
|
||||
|
||||
## II. Design System Foundation (Tokens & Core Components)
|
||||
|
||||
* [ ] **Define a Color Palette:**
|
||||
* [ ] **Primary Brand Color:** User-specified, used strategically.
|
||||
* [ ] **Neutrals:** A scale of grays (5-7 steps) for text, backgrounds, borders.
|
||||
* [ ] **Semantic Colors:** Define specific colors for Success (green), Error/Destructive (red), Warning (yellow/amber), Informational (blue).
|
||||
* [ ] **Dark Mode Palette:** Create a corresponding accessible dark mode palette.
|
||||
* [ ] **Accessibility Check:** Ensure all color combinations meet WCAG AA contrast ratios.
|
||||
* [ ] **Establish a Typographic Scale:**
|
||||
* [ ] **Primary Font Family:** Choose a clean, legible sans-serif font (e.g., Inter, Manrope, system-ui).
|
||||
* [ ] **Modular Scale:** Define distinct sizes for H1, H2, H3, H4, Body Large, Body Medium (Default), Body Small/Caption. (e.g., H1: 32px, Body: 14px/16px).
|
||||
* [ ] **Font Weights:** Utilize a limited set of weights (e.g., Regular, Medium, SemiBold, Bold).
|
||||
* [ ] **Line Height:** Ensure generous line height for readability (e.g., 1.5-1.7 for body text).
|
||||
* [ ] **Define Spacing Units:**
|
||||
* [ ] **Base Unit:** Establish a base unit (e.g., 8px).
|
||||
* [ ] **Spacing Scale:** Use multiples of the base unit for all padding, margins, and layout spacing (e.g., 4px, 8px, 12px, 16px, 24px, 32px).
|
||||
* [ ] **Define Border Radii:**
|
||||
* [ ] **Consistent Values:** Use a small set of consistent border radii (e.g., Small: 4-6px for inputs/buttons; Medium: 8-12px for cards/modals).
|
||||
* [ ] **Develop Core UI Components (with consistent states: default, hover, active, focus, disabled):**
|
||||
* [ ] Buttons (primary, secondary, tertiary/ghost, destructive, link-style; with icon options)
|
||||
* [ ] Input Fields (text, textarea, select, date picker; with clear labels, placeholders, helper text, error messages)
|
||||
* [ ] Checkboxes & Radio Buttons
|
||||
* [ ] Toggles/Switches
|
||||
* [ ] Cards (for content blocks, multimedia items, dashboard widgets)
|
||||
* [ ] Tables (for data display; with clear headers, rows, cells; support for sorting, filtering)
|
||||
* [ ] Modals/Dialogs (for confirmations, forms, detailed views)
|
||||
* [ ] Navigation Elements (Sidebar, Tabs)
|
||||
* [ ] Badges/Tags (for status indicators, categorization)
|
||||
* [ ] Tooltips (for contextual help)
|
||||
* [ ] Progress Indicators (Spinners, Progress Bars)
|
||||
* [ ] Icons (use a single, modern, clean icon set; SVG preferred)
|
||||
* [ ] Avatars
|
||||
|
||||
## III. Layout, Visual Hierarchy & Structure
|
||||
|
||||
* [ ] **Responsive Grid System:** Design based on a responsive grid (e.g., 12-column) for consistent layout across devices.
|
||||
* [ ] **Strategic White Space:** Use ample negative space to improve clarity, reduce cognitive load, and create visual balance.
|
||||
* [ ] **Clear Visual Hierarchy:** Guide the user's eye using typography (size, weight, color), spacing, and element positioning.
|
||||
* [ ] **Consistent Alignment:** Maintain consistent alignment of elements.
|
||||
* [ ] **Main Dashboard Layout:**
|
||||
* [ ] Persistent Left Sidebar: For primary navigation between modules.
|
||||
* [ ] Content Area: Main space for module-specific interfaces.
|
||||
* [ ] (Optional) Top Bar: For global search, user profile, notifications.
|
||||
* [ ] **Mobile-First Considerations:** Ensure the design adapts gracefully to smaller screens.
|
||||
|
||||
## IV. Interaction Design & Animations
|
||||
|
||||
* [ ] **Purposeful Micro-interactions:** Use subtle animations and visual feedback for user actions (hovers, clicks, form submissions, status changes).
|
||||
* [ ] Feedback should be immediate and clear.
|
||||
* [ ] Animations should be quick (150-300ms) and use appropriate easing (e.g., ease-in-out).
|
||||
* [ ] **Loading States:** Implement clear loading indicators (skeleton screens for page loads, spinners for in-component actions).
|
||||
* [ ] **Transitions:** Use smooth transitions for state changes, modal appearances, and section expansions.
|
||||
* [ ] **Avoid Distraction:** Animations should enhance usability, not overwhelm or slow down the user.
|
||||
* [ ] **Keyboard Navigation:** Ensure all interactive elements are keyboard accessible and focus states are clear.
|
||||
|
||||
## V. Specific Module Design Tactics
|
||||
|
||||
### A. Multimedia Moderation Module
|
||||
|
||||
* [ ] **Clear Media Display:** Prominent image/video previews (grid or list view).
|
||||
* [ ] **Obvious Moderation Actions:** Clearly labeled buttons (Approve, Reject, Flag, etc.) with distinct styling (e.g., primary/secondary, color-coding). Use icons for quick recognition.
|
||||
* [ ] **Visible Status Indicators:** Use color-coded Badges for content status (Pending, Approved, Rejected).
|
||||
* [ ] **Contextual Information:** Display relevant metadata (uploader, timestamp, flags) alongside media.
|
||||
* [ ] **Workflow Efficiency:**
|
||||
* [ ] Bulk Actions: Allow selection and moderation of multiple items.
|
||||
* [ ] Keyboard Shortcuts: For common moderation actions.
|
||||
* [ ] **Minimize Fatigue:** Clean, uncluttered interface; consider dark mode option.
|
||||
|
||||
### B. Data Tables Module (Contacts, Admin Settings)
|
||||
|
||||
* [ ] **Readability & Scannability:**
|
||||
* [ ] Smart Alignment: Left-align text, right-align numbers.
|
||||
* [ ] Clear Headers: Bold column headers.
|
||||
* [ ] Zebra Striping (Optional): For dense tables.
|
||||
* [ ] Legible Typography: Simple, clean sans-serif fonts.
|
||||
* [ ] Adequate Row Height & Spacing.
|
||||
* [ ] **Interactive Controls:**
|
||||
* [ ] Column Sorting: Clickable headers with sort indicators.
|
||||
* [ ] Intuitive Filtering: Accessible filter controls (dropdowns, text inputs) above the table.
|
||||
* [ ] Global Table Search.
|
||||
* [ ] **Large Datasets:**
|
||||
* [ ] Pagination (preferred for admin tables) or virtual/infinite scroll.
|
||||
* [ ] Sticky Headers / Frozen Columns: If applicable.
|
||||
* [ ] **Row Interactions:**
|
||||
* [ ] Expandable Rows: For detailed information.
|
||||
* [ ] Inline Editing: For quick modifications.
|
||||
* [ ] Bulk Actions: Checkboxes and contextual toolbar.
|
||||
* [ ] Action Icons/Buttons per Row: (Edit, Delete, View Details) clearly distinguishable.
|
||||
|
||||
### C. Configuration Panels Module (Microsite, Admin Settings)
|
||||
|
||||
* [ ] **Clarity & Simplicity:** Clear, unambiguous labels for all settings. Concise helper text or tooltips for descriptions. Avoid jargon.
|
||||
* [ ] **Logical Grouping:** Group related settings into sections or tabs.
|
||||
* [ ] **Progressive Disclosure:** Hide advanced or less-used settings by default (e.g., behind "Advanced Settings" toggle, accordions).
|
||||
* [ ] **Appropriate Input Types:** Use correct form controls (text fields, checkboxes, toggles, selects, sliders) for each setting.
|
||||
* [ ] **Visual Feedback:** Immediate confirmation of changes saved (e.g., toast notifications, inline messages). Clear error messages for invalid inputs.
|
||||
* [ ] **Sensible Defaults:** Provide default values for all settings.
|
||||
* [ ] **Reset Option:** Easy way to "Reset to Defaults" for sections or entire configuration.
|
||||
* [ ] **Microsite Preview (If Applicable):** Show a live or near-live preview of microsite changes.
|
||||
|
||||
## VI. CSS & Styling Architecture
|
||||
|
||||
* [ ] **Choose a Scalable CSS Methodology:**
|
||||
* [ ] **Utility-First (Recommended for LLM):** e.g., Tailwind CSS. Define design tokens in config, apply via utility classes.
|
||||
* [ ] **BEM with Sass:** If not utility-first, use structured BEM naming with Sass variables for tokens.
|
||||
* [ ] **CSS-in-JS (Scoped Styles):** e.g., Stripe's approach for Elements.
|
||||
* [ ] **Integrate Design Tokens:** Ensure colors, fonts, spacing, radii tokens are directly usable in the chosen CSS architecture.
|
||||
* [ ] **Maintainability & Readability:** Code should be well-organized and easy to understand.
|
||||
* [ ] **Performance:** Optimize CSS delivery; avoid unnecessary bloat.
|
||||
|
||||
## VII. General Best Practices
|
||||
|
||||
* [ ] **Iterative Design & Testing:** Continuously test with users and iterate on designs.
|
||||
* [ ] **Clear Information Architecture:** Organize content and navigation logically.
|
||||
* [ ] **Responsive Design:** Ensure the dashboard is fully functional and looks great on all device sizes (desktop, tablet, mobile).
|
||||
* [ ] **Documentation:** Maintain clear documentation for the design system and components.
|
||||
|
|
@ -0,0 +1,717 @@
|
|||
ai-station-app | [INFO] 2025-12-31 10:20:23,724 [RapidOCR] download_file.py:82: Download size: 25.67MB
|
||||
ai-station-app | [INFO] 2025-12-31 10:20:27,451 [RapidOCR] download_file.py:95: Successfully saved to: /usr/local/lib/python3.11/site-packages/rapidocr/models/ch_PP-OCRv4_rec_infer.pth
|
||||
ai-station-app | [INFO] 2025-12-31 10:20:27,460 [RapidOCR] main.py:50: Using /usr/local/lib/python3.11/site-packages/rapidocr/models/ch_PP-OCRv4_rec_infer.pth
|
||||
ai-station-app | 2025-12-31 10:20:28 - Auto OCR model selected rapidocr with torch.
|
||||
ai-station-app | 2025-12-31 10:20:28 - Loading plugin 'docling_defaults'
|
||||
ai-station-app | 2025-12-31 10:20:28 - Registered layout engines: ['docling_layout_default', 'docling_experimental_table_crops_layout']
|
||||
ai-station-app | 2025-12-31 10:20:28 - Accelerator device: 'cpu'
|
||||
ai-station-app | 2025-12-31 10:20:59 - Loading plugin 'docling_defaults'
|
||||
ai-station-app | 2025-12-31 10:20:59 - Registered table structure engines: ['docling_tableformer']
|
||||
ai-station-app | 2025-12-31 10:22:00 - Accelerator device: 'cpu'
|
||||
ai-station-app | 2025-12-31 10:22:02 - Processing document esempio manuale Omron.pdf
|
||||
ai-station-app | 2025-12-31 10:23:07 - Finished converting document esempio manuale Omron.pdf in 173.75 sec.
|
||||
ai-station-app | 2025-12-31 10:23:07 - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
|
||||
ai-station-app | 2025-12-31 10:23:07 - An unexpected error occurred:
|
||||
ai-station-app | 2025-12-31 10:23:07 - Translation file for it-IT not found. Using default translation en-US.
|
||||
ai-station-qdrant | 2025-12-31T10:23:07.266024Z INFO actix_web::middleware::logger: 172.18.0.4 "GET /collections/admin_docs/exists HTTP/1.1" 200 69 "-" "python-client/1.16.2 python/3.11.14" 0.001423
|
||||
ai-station-app | 2025-12-31 10:23:07 - HTTP Request: GET http://qdrant:6333/collections/admin_docs/exists "HTTP/1.1 200 OK"
|
||||
ai-station-qdrant | 2025-12-31T10:23:07.422109Z INFO storage::content_manager::toc::collection_meta_ops: Creating collection admin_docs
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-426' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-429' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-432' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-434' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-437' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-439' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-441' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-443' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-446' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-448' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-450' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-452' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-454' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-457' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-459' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-461' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-463' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-465' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-467' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-469' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-471' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-474' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-478' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-480' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-482' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-484' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-486' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-488' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-490' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-492' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-494' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-496' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-498' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-501' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-503' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | 2025-12-31 10:23:07 - Task exception was never retrieved
|
||||
ai-station-app | future: <Task finished name='Task-505' coro=<AsyncServer._handle_event_internal() done, defined at /usr/local/lib/python3.11/site-packages/socketio/async_server.py:605> exception=ValueError('Session not found')>
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/asyncio/tasks.py", line 277, in __step
|
||||
ai-station-app | result = coro.send(None)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 607, in _handle_event_internal
|
||||
ai-station-app | r = await server._trigger_event(data[0], namespace, sid, *data[1:])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/socketio/async_server.py", line 634, in _trigger_event
|
||||
ai-station-app | ret = await handler(*args)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/socket.py", line 323, in window_message
|
||||
ai-station-app | session = WebsocketSession.require(sid)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/session.py", line 354, in require
|
||||
ai-station-app | raise ValueError("Session not found")
|
||||
ai-station-app | ValueError: Session not found
|
||||
ai-station-app | post request handler error
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/engineio/async_server.py", line 306, in handle_request
|
||||
ai-station-app | await socket.handle_post_request(environ)
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/engineio/async_socket.py", line 109, in handle_post_request
|
||||
ai-station-app | p = payload.Payload(encoded_payload=body)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/engineio/payload.py", line 13, in __init__
|
||||
ai-station-app | self.decode(encoded_payload)
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/engineio/payload.py", line 44, in decode
|
||||
ai-station-app | raise ValueError('Too many packets in payload')
|
||||
ai-station-app | ValueError: Too many packets in payload
|
||||
ai-station-app | 2025-12-31 10:23:07 - post request handler error
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/engineio/async_server.py", line 306, in handle_request
|
||||
ai-station-app | await socket.handle_post_request(environ)
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/engineio/async_socket.py", line 109, in handle_post_request
|
||||
ai-station-app | p = payload.Payload(encoded_payload=body)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/engineio/payload.py", line 13, in __init__
|
||||
ai-station-app | self.decode(encoded_payload)
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/engineio/payload.py", line 44, in decode
|
||||
ai-station-app | raise ValueError('Too many packets in payload')
|
||||
ai-station-app | ValueError: Too many packets in payload
|
||||
ai-station-qdrant | 2025-12-31T10:23:07.898915Z INFO actix_web::middleware::logger: 172.18.0.4 "PUT /collections/admin_docs HTTP/1.1" 200 57 "-" "python-client/1.16.2 python/3.11.14" 0.478796
|
||||
ai-station-app | 2025-12-31 10:23:07 - HTTP Request: PUT http://qdrant:6333/collections/admin_docs "HTTP/1.1 200 OK"
|
||||
ai-station-app | 2025-12-31 10:23:08 - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
|
||||
ai-station-qdrant | 2025-12-31T10:23:12.256007Z INFO actix_web::middleware::logger: 172.18.0.4 "PUT /collections/admin_docs/points?wait=true HTTP/1.1" 200 84 "-" "python-client/1.16.2 python/3.11.14" 0.128378
|
||||
ai-station-app | 2025-12-31 10:23:12 - HTTP Request: PUT http://qdrant:6333/collections/admin_docs/points?wait=true "HTTP/1.1 200 OK"
|
||||
ai-station-app | 2025-12-31 10:23:12 - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
|
||||
ai-station-qdrant | 2025-12-31T10:23:12.413564Z INFO actix_web::middleware::logger: 172.18.0.4 "GET /collections/admin_docs/exists HTTP/1.1" 200 68 "-" "python-client/1.16.2 python/3.11.14" 0.006418
|
||||
ai-station-app | 2025-12-31 10:23:12 - HTTP Request: GET http://qdrant:6333/collections/admin_docs/exists "HTTP/1.1 200 OK"
|
||||
ai-station-app | 2025-12-31 10:23:12 - module 'chainlit.data' has no attribute 'qdrant_client'
|
||||
ai-station-app | Traceback (most recent call last):
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/utils.py", line 57, in wrapper
|
||||
ai-station-app | return await user_function(**params_values)
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/usr/local/lib/python3.11/site-packages/chainlit/callbacks.py", line 161, in with_parent_id
|
||||
ai-station-app | await func(message)
|
||||
ai-station-app | File "/app/app.py", line 250, in main
|
||||
ai-station-app | rag_context = await search_hybrid(message.content, profile["rag_collection"])
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | File "/app/app.py", line 166, in search_hybrid
|
||||
ai-station-app | cl.data.qdrant_client.models.Prefetch(
|
||||
ai-station-app | ^^^^^^^^^^^^^^^^^^^^^
|
||||
ai-station-app | AttributeError: module 'chainlit.data' has no attribute 'qdrant_client'
|
||||
ai-station-app | 2025-12-31 10:23:28 - Translation file for it-IT not found. Using default translation en-US.
|
||||
ai-station-postgres | 2025-12-31 10:23:35.822 UTC [28] LOG: checkpoint starting: time
|
||||
ai-station-postgres | 2025-12-31 10:23:37.312 UTC [28] LOG: checkpoint complete: wrote 17 buffers (0.1%); 0 WAL file(s) added, 0 removed, 0 recycled; write=1.428 s, sync=0.019 s, total=1.491 s; sync files=11, longest=0.009 s, average=0.002 s; distance=68 kB, estimate=68 kB
|
||||
^C
|
||||
giuseppe@ai-srv:~/ai-station$
|
||||
|
|
@ -10,7 +10,7 @@ services:
|
|||
- DATABASE_URL=postgresql+asyncpg://ai_user:secure_password_here@postgres:5432/ai_station
|
||||
- OLLAMA_URL=http://192.168.1.243:11434
|
||||
- QDRANT_URL=http://qdrant:6333
|
||||
- BGE_API_URL=http://192.168.1.243:8001
|
||||
- BGE_API_URL=http://192.168.1.243:8001/embed
|
||||
volumes:
|
||||
- ./workspaces:/app/workspaces
|
||||
- ./public:/app/public # ⬅️ VERIFICA QUESTO
|
||||
|
|
|
|||
133
error.log
133
error.log
|
|
@ -1,133 +0,0 @@
|
|||
qdrant-1 | _ _
|
||||
chainlit-app-1 | 2025-12-25 18:05:12 - INFO - chainlit - Your app is available at http://0.0.0.0:8000
|
||||
postgres-1 |
|
||||
postgres-1 | PostgreSQL Database directory appears to contain a database; Skipping initialization
|
||||
postgres-1 |
|
||||
postgres-1 | 2025-12-25 16:38:01.071 UTC [1] LOG: starting PostgreSQL 18.1 (Debian 18.1-1.pgdg13+2) on x86_64-pc-linux-gnu, compiled by gcc (Debian 14.2.0-19) 14.2.0, 64-bit
|
||||
qdrant-1 | __ _ __| |_ __ __ _ _ __ | |_
|
||||
qdrant-1 | / _` |/ _` | '__/ _` | '_ \| __|
|
||||
qdrant-1 | | (_| | (_| | | | (_| | | | | |_
|
||||
qdrant-1 | \__, |\__,_|_| \__,_|_| |_|\__|
|
||||
postgres-1 | 2025-12-25 16:38:01.072 UTC [1] LOG: listening on IPv4 address "0.0.0.0", port 5432
|
||||
postgres-1 | 2025-12-25 16:38:01.072 UTC [1] LOG: listening on IPv6 address "::", port 5432
|
||||
postgres-1 | 2025-12-25 16:38:01.093 UTC [1] LOG: listening on Unix socket "/var/run/postgresql/.s.PGSQL.5432"
|
||||
postgres-1 | 2025-12-25 16:38:01.126 UTC [32] LOG: database system was shut down at 2025-12-25 14:34:55 UTC
|
||||
postgres-1 | 2025-12-25 16:38:01.155 UTC [1] LOG: database system is ready to accept connections
|
||||
qdrant-1 | |_|
|
||||
qdrant-1 |
|
||||
qdrant-1 | Access web UI at https://ui.qdrant.tech/?v=v1.0.0
|
||||
qdrant-1 |
|
||||
qdrant-1 | [2025-12-25T16:38:00.816Z INFO storage::content_manager::consensus::persistent] Initializing new raft state at ./storage/raft_state
|
||||
qdrant-1 | [2025-12-25T16:38:00.861Z INFO qdrant] Distributed mode disabled
|
||||
qdrant-1 | [2025-12-25T16:38:00.861Z INFO qdrant] Telemetry reporting enabled, id: e6113e43-627c-471d-8374-0f1b61799d76
|
||||
qdrant-1 | [2025-12-25T16:38:00.872Z INFO qdrant::tonic] Qdrant gRPC listening on 6334
|
||||
qdrant-1 | [2025-12-25T16:38:00.890Z INFO actix_server::builder] Starting 3 workers
|
||||
qdrant-1 | [2025-12-25T16:38:00.890Z INFO actix_server::server] Actix runtime found; starting in Actix runtime
|
||||
qdrant-1 | [2025-12-25T16:39:02.504Z INFO actix_server::server] SIGTERM received; starting graceful shutdown
|
||||
qdrant-1 | [2025-12-25T16:39:02.505Z INFO actix_server::worker] Shutting down idle worker
|
||||
qdrant-1 | [2025-12-25T16:39:02.508Z INFO actix_server::accept] Accept thread stopped
|
||||
qdrant-1 | [2025-12-25T16:39:02.508Z INFO actix_server::worker] Shutting down idle worker
|
||||
qdrant-1 | [2025-12-25T16:39:02.508Z INFO actix_server::worker] Shutting down idle worker
|
||||
qdrant-1 | _ _
|
||||
qdrant-1 | __ _ __| |_ __ __ _ _ __ | |_
|
||||
qdrant-1 | / _` |/ _` | '__/ _` | '_ \| __|
|
||||
qdrant-1 | | (_| | (_| | | | (_| | | | | |_
|
||||
postgres-1 | 2025-12-25 16:39:02.495 UTC [1] LOG: received fast shutdown request
|
||||
postgres-1 | 2025-12-25 16:39:02.505 UTC [1] LOG: aborting any active transactions
|
||||
postgres-1 | 2025-12-25 16:39:02.521 UTC [1] LOG: background worker "logical replication launcher" (PID 35) exited with exit code 1
|
||||
postgres-1 | 2025-12-25 16:39:02.521 UTC [30] LOG: shutting down
|
||||
postgres-1 | 2025-12-25 16:39:02.533 UTC [30] LOG: checkpoint starting: shutdown immediate
|
||||
chainlit-app-1 | 2025-12-25 18:05:25 - INFO - httpx - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | /app/app.py:43: UserWarning: Qdrant client version 1.16.2 is incompatible with server version 1.0.0. Major versions should match and minor version difference must not exceed 1. Set check_compatibility=False to skip version check.
|
||||
chainlit-app-1 | return QdrantClient(url=QDRANT_URL)
|
||||
qdrant-1 | \__, |\__,_|_| \__,_|_| |_|\__|
|
||||
chainlit-app-1 | 2025-12-25 18:05:25 - INFO - httpx - HTTP Request: GET http://qdrant:6333/collections/documents "HTTP/1.1 404 Not Found"
|
||||
chainlit-app-1 | 2025-12-25 18:06:08 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
chainlit-app-1 | 2025-12-25 18:06:10 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
qdrant-1 | |_|
|
||||
qdrant-1 |
|
||||
qdrant-1 | Access web UI at https://ui.qdrant.tech/?v=v1.0.0
|
||||
qdrant-1 |
|
||||
qdrant-1 | [2025-12-25T16:43:53.592Z INFO storage::content_manager::consensus::persistent] Loading raft state from ./storage/raft_state
|
||||
qdrant-1 | [2025-12-25T16:43:53.612Z INFO qdrant] Distributed mode disabled
|
||||
postgres-1 | 2025-12-25 16:39:02.601 UTC [30] LOG: checkpoint complete: wrote 0 buffers (0.0%), wrote 3 SLRU buffers; 0 WAL file(s) added, 0 removed, 0 recycled; write=0.019 s, sync=0.009 s, total=0.079 s; sync files=2, longest=0.005 s, average=0.005 s; distance=0 kB, estimate=0 kB; lsn=0/1BEF980, redo lsn=0/1BEF980
|
||||
postgres-1 | 2025-12-25 16:39:02.644 UTC [1] LOG: database system is shut down
|
||||
postgres-1 |
|
||||
postgres-1 | PostgreSQL Database directory appears to contain a database; Skipping initialization
|
||||
postgres-1 |
|
||||
postgres-1 | 2025-12-25 16:43:53.946 UTC [1] LOG: starting PostgreSQL 18.1 (Debian 18.1-1.pgdg13+2) on x86_64-pc-linux-gnu, compiled by gcc (Debian 14.2.0-19) 14.2.0, 64-bit
|
||||
postgres-1 | 2025-12-25 16:43:53.947 UTC [1] LOG: listening on IPv4 address "0.0.0.0", port 5432
|
||||
postgres-1 | 2025-12-25 16:43:53.947 UTC [1] LOG: listening on IPv6 address "::", port 5432
|
||||
qdrant-1 | [2025-12-25T16:43:53.612Z INFO qdrant] Telemetry reporting enabled, id: 2a83356a-9770-47d3-a0bd-638f75769522
|
||||
qdrant-1 | [2025-12-25T16:43:53.615Z INFO qdrant::tonic] Qdrant gRPC listening on 6334
|
||||
qdrant-1 | [2025-12-25T16:43:53.616Z INFO actix_server::builder] Starting 3 workers
|
||||
postgres-1 | 2025-12-25 16:43:53.965 UTC [1] LOG: listening on Unix socket "/var/run/postgresql/.s.PGSQL.5432"
|
||||
postgres-1 | 2025-12-25 16:43:53.990 UTC [32] LOG: database system was shut down at 2025-12-25 16:39:02 UTC
|
||||
postgres-1 | 2025-12-25 16:43:54.013 UTC [1] LOG: database system is ready to accept connections
|
||||
postgres-1 | 2025-12-25 16:48:54.089 UTC [30] LOG: checkpoint starting: time
|
||||
postgres-1 | 2025-12-25 16:48:54.175 UTC [30] LOG: checkpoint complete: wrote 0 buffers (0.0%), wrote 3 SLRU buffers; 0 WAL file(s) added, 0 removed, 0 recycled; write=0.036 s, sync=0.009 s, total=0.088 s; sync files=2, longest=0.005 s, average=0.005 s; distance=0 kB, estimate=0 kB; lsn=0/1BEFA88, redo lsn=0/1BEFA30
|
||||
postgres-1 | 2025-12-25 16:56:42.002 UTC [1] LOG: received fast shutdown request
|
||||
postgres-1 | 2025-12-25 16:56:42.018 UTC [1] LOG: aborting any active transactions
|
||||
postgres-1 | 2025-12-25 16:56:42.026 UTC [1] LOG: background worker "logical replication launcher" (PID 35) exited with exit code 1
|
||||
postgres-1 | 2025-12-25 16:56:42.030 UTC [30] LOG: shutting down
|
||||
postgres-1 | 2025-12-25 16:56:42.039 UTC [30] LOG: checkpoint starting: shutdown immediate
|
||||
postgres-1 | 2025-12-25 16:56:42.086 UTC [30] LOG: checkpoint complete: wrote 0 buffers (0.0%), wrote 0 SLRU buffers; 0 WAL file(s) added, 0 removed, 0 recycled; write=0.004 s, sync=0.001 s, total=0.057 s; sync files=0, longest=0.000 s, average=0.000 s; distance=0 kB, estimate=0 kB; lsn=0/1BEFB38, redo lsn=0/1BEFB38
|
||||
postgres-1 | 2025-12-25 16:56:42.131 UTC [1] LOG: database system is shut down
|
||||
postgres-1 |
|
||||
postgres-1 | PostgreSQL Database directory appears to contain a database; Skipping initialization
|
||||
postgres-1 |
|
||||
chainlit-app-1 | 2025-12-25 18:06:10 - WARNING - chainlit - Translated markdown file for it-IT not found. Defaulting to chainlit.md.
|
||||
chainlit-app-1 | 2025-12-25 18:06:13 - INFO - chainlit - Missing custom logo. Falling back to default logo.
|
||||
chainlit-app-1 | 2025-12-25 18:06:21 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
chainlit-app-1 | 2025-12-25 18:06:21 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
qdrant-1 | [2025-12-25T16:43:53.617Z INFO actix_server::server] Actix runtime found; starting in Actix runtime
|
||||
qdrant-1 | [2025-12-25T16:56:42.005Z INFO actix_server::server] SIGTERM received; starting graceful shutdown
|
||||
qdrant-1 | [2025-12-25T16:56:42.006Z INFO actix_server::worker] Shutting down idle worker
|
||||
qdrant-1 | [2025-12-25T16:56:42.006Z INFO actix_server::worker] Shutting down idle worker
|
||||
qdrant-1 | [2025-12-25T16:56:42.007Z INFO actix_server::worker] Shutting down idle worker
|
||||
qdrant-1 | [2025-12-25T16:56:42.007Z INFO actix_server::accept] Accept thread stopped
|
||||
qdrant-1 | _ _
|
||||
qdrant-1 | __ _ __| |_ __ __ _ _ __ | |_
|
||||
qdrant-1 | / _` |/ _` | '__/ _` | '_ \| __|
|
||||
qdrant-1 | | (_| | (_| | | | (_| | | | | |_
|
||||
qdrant-1 | \__, |\__,_|_| \__,_|_| |_|\__|
|
||||
qdrant-1 | |_|
|
||||
qdrant-1 |
|
||||
qdrant-1 | Access web UI at https://ui.qdrant.tech/?v=v1.0.0
|
||||
qdrant-1 |
|
||||
qdrant-1 | [2025-12-25T16:56:52.790Z INFO storage::content_manager::consensus::persistent] Loading raft state from ./storage/raft_state
|
||||
qdrant-1 | [2025-12-25T16:56:52.796Z INFO qdrant] Distributed mode disabled
|
||||
qdrant-1 | [2025-12-25T16:56:52.796Z INFO qdrant] Telemetry reporting enabled, id: f821b8ea-9ee5-497e-a172-dfebf253f7b1
|
||||
qdrant-1 | [2025-12-25T16:56:52.797Z INFO qdrant::tonic] Qdrant gRPC listening on 6334
|
||||
qdrant-1 | [2025-12-25T16:56:52.798Z INFO actix_server::builder] Starting 3 workers
|
||||
qdrant-1 | [2025-12-25T16:56:52.798Z INFO actix_server::server] Actix runtime found; starting in Actix runtime
|
||||
qdrant-1 | [2025-12-25T18:05:25.183Z INFO actix_web::middleware::logger] 172.18.0.4 "GET /collections/documents HTTP/1.1" 404 110 "-" "python-client/1.16.2 python/3.10.19" 0.007704
|
||||
qdrant-1 | [2025-12-25T18:05:30.499Z INFO actix_web::middleware::logger] 172.18.0.4 "PUT /collections/documents HTTP/1.1" 200 71 "-" "python-client/1.16.2 python/3.10.19" 5.311157
|
||||
qdrant-1 | [2025-12-25T18:06:22.662Z INFO actix_web::middleware::logger] 172.18.0.4 "GET /collections/documents HTTP/1.1" 200 413 "-" "python-client/1.16.2 python/3.10.19" 0.005606
|
||||
postgres-1 | 2025-12-25 16:56:43.530 UTC [1] LOG: starting PostgreSQL 18.1 (Debian 18.1-1.pgdg13+2) on x86_64-pc-linux-gnu, compiled by gcc (Debian 14.2.0-19) 14.2.0, 64-bit
|
||||
postgres-1 | 2025-12-25 16:56:43.532 UTC [1] LOG: listening on IPv4 address "0.0.0.0", port 5432
|
||||
postgres-1 | 2025-12-25 16:56:43.532 UTC [1] LOG: listening on IPv6 address "::", port 5432
|
||||
postgres-1 | 2025-12-25 16:56:43.552 UTC [1] LOG: listening on Unix socket "/var/run/postgresql/.s.PGSQL.5432"
|
||||
postgres-1 | 2025-12-25 16:56:43.585 UTC [32] LOG: database system was shut down at 2025-12-25 16:56:42 UTC
|
||||
postgres-1 | 2025-12-25 16:56:43.616 UTC [1] LOG: database system is ready to accept connections
|
||||
postgres-1 | 2025-12-25 17:01:43.645 UTC [30] LOG: checkpoint starting: time
|
||||
postgres-1 | 2025-12-25 17:01:43.712 UTC [30] LOG: checkpoint complete: wrote 0 buffers (0.0%), wrote 3 SLRU buffers; 0 WAL file(s) added, 0 removed, 0 recycled; write=0.019 s, sync=0.009 s, total=0.068 s; sync files=2, longest=0.005 s, average=0.005 s; distance=0 kB, estimate=0 kB; lsn=0/1BEFC40, redo lsn=0/1BEFBE8
|
||||
chainlit-app-1 | 2025-12-25 18:06:21 - WARNING - chainlit - Translated markdown file for it-IT not found. Defaulting to chainlit.md.
|
||||
chainlit-app-1 | 2025-12-25 18:06:22 - INFO - httpx - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | 2025-12-25 18:06:22 - INFO - httpx - HTTP Request: GET http://qdrant:6333/collections/documents "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | 2025-12-25 18:06:28 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
chainlit-app-1 | 2025-12-25 18:06:36 - INFO - httpx - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | 2025-12-25 18:06:37 - INFO - httpx - HTTP Request: POST http://192.168.1.243:11434/api/embed "HTTP/1.1 500 Internal Server Error"
|
||||
chainlit-app-1 | 2025-12-25 18:06:39 - INFO - httpx - HTTP Request: POST http://192.168.1.243:11434/api/chat "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | 2025-12-25 18:06:48 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
chainlit-app-1 | 2025-12-25 18:07:02 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
chainlit-app-1 | 2025-12-25 18:07:16 - INFO - httpx - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | 2025-12-25 18:07:22 - INFO - httpx - HTTP Request: POST http://192.168.1.243:11434/api/embed "HTTP/1.1 500 Internal Server Error"
|
||||
chainlit-app-1 | 2025-12-25 18:07:22 - INFO - httpx - HTTP Request: POST http://192.168.1.243:11434/api/chat "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | 2025-12-25 18:07:49 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
chainlit-app-1 | 2025-12-25 18:07:54 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
chainlit-app-1 | 2025-12-25 18:08:15 - INFO - httpx - HTTP Request: POST http://192.168.1.243:11434/api/chat "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | 2025-12-25 18:08:30 - WARNING - chainlit - Translation file for it-IT not found. Using parent translation it.
|
||||
chainlit-app-1 | 2025-12-25 18:08:57 - INFO - httpx - HTTP Request: GET http://qdrant:6333 "HTTP/1.1 200 OK"
|
||||
chainlit-app-1 | 2025-12-25 18:09:03 - INFO - httpx - HTTP Request: POST http://192.168.1.243:11434/api/embed "HTTP/1.1 500 Internal Server Error"
|
||||
chainlit-app-1 | 2025-12-25 18:09:03 - INFO - httpx - HTTP Request: POST http://192.168.1.243:11434/api/chat "HTTP/1.1 200 OK"
|
||||
|
|
@ -1,36 +1,12 @@
|
|||
/* dFm AI Station - Perplexity Clean Style */
|
||||
:root {
|
||||
--bg-color: #0B0F1A;
|
||||
--card-color: #161B2C;
|
||||
--accent-color: #6366F1;
|
||||
.user-badge {
|
||||
background: linear-gradient(45deg, #667eea 0%, #764ba2 100%);
|
||||
padding: 4px 12px;
|
||||
border-radius: 12px;
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
body { background-color: var(--bg-color) !important; color: #F1F5F9 !important; }
|
||||
|
||||
/* Header e Logo */
|
||||
header {
|
||||
background: rgba(11, 15, 26, 0.8) !important;
|
||||
backdrop-filter: blur(8px);
|
||||
border-bottom: 1px solid #23293F !important;
|
||||
}
|
||||
|
||||
/* Sidebar */
|
||||
.MuiDrawer-paper {
|
||||
background-color: var(--bg-color) !important;
|
||||
border-right: 1px solid #23293F !important;
|
||||
}
|
||||
|
||||
/* Messaggi */
|
||||
div[class*="user"] {
|
||||
background: #1E253A !important;
|
||||
border-radius: 12px !important;
|
||||
border-left: 4px solid var(--accent-color) !important;
|
||||
}
|
||||
|
||||
/* Input Area */
|
||||
form {
|
||||
background: var(--card-color) !important;
|
||||
border-radius: 20px !important;
|
||||
border: 1px solid #334155 !important;
|
||||
box-shadow: 0 4px 20px rgba(0,0,0,0.5) !important;
|
||||
/* Evidenzia codice */
|
||||
.message pre {
|
||||
background: #2d2d2d;
|
||||
border-left: 4px solid #0066CC;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,23 @@
|
|||
import httpx
import base64
import sys

# Configuration: Ollama endpoint and the vision model we expect to find.
OLLAMA_URL = "http://192.168.1.243:11434"
MODEL = "minicpm-v"

print(f"👁️ Test Visione su {OLLAMA_URL} con modello {MODEL}...")

# 1. Verify the model is available on the Ollama server before proceeding.
try:
    r = httpx.get(f"{OLLAMA_URL}/api/tags", timeout=10.0)
    # Surface HTTP errors explicitly instead of failing later on r.json().
    r.raise_for_status()
    models = [m["name"] for m in r.json()["models"]]
    # Ollama reports names as "model:tag" (e.g. "minicpm-v:latest").
    # Match the exact name or the untagged base name — a plain substring
    # check would false-positive on prefixes (e.g. "minicpm-v2").
    if not any(name == MODEL or name.split(":", 1)[0] == MODEL for name in models):
        print(f"❌ Errore: Il modello {MODEL} non è stato trovato su Ollama!")
        sys.exit(1)
    print(f"✅ Modello {MODEL} trovato.")
except Exception as e:
    # sys.exit raises SystemExit (a BaseException), so it is NOT swallowed here.
    print(f"❌ Errore connessione Ollama: {e}")
    sys.exit(1)

print("🚀 Tutto pronto per l'implementazione!")
|
||||
Loading…
Reference in New Issue