Compare commits

5 Commits

Author SHA1 Message Date
Samuele E. Locatelli 1a6cb799e1 Fix display + ordinamento, test modelli validi x SQL e altro code completion 2025-09-06 11:07:30 +00:00
Samuele E. Locatelli c33e8f2678 Modifica visualizzazione statistiche/elenco modelli 2025-09-06 07:10:14 +00:00
Samuele E. Locatelli 776030832e Fix visualizzazione statistiche 2025-09-05 17:50:16 +00:00
Samuele E. Locatelli 4c04306d1d Rimozione classi inutilizzate 2025-09-05 17:35:16 +00:00
Samuele Locatelli c7085c72c5 Merge tag 'AddModelSelAndDetails_01' into develop
Update modello in uso
2025-09-05 18:04:20 +02:00
9 changed files with 380 additions and 570 deletions
+37 -8
View File
@@ -2,12 +2,13 @@
import httpx
import json
import asyncio
import time
from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import StreamingResponse
from typing import List, Dict, Any
from models.chat import ChatRequest, ChatResponse
from services import redis_service
from services.history_manager import prepare_history
from services.history_manager import prepare_history, track_lm_call, get_lm_stats
from utils.logging import logger
from config import settings
@@ -22,14 +23,15 @@ async def chat_endpoint(payload: ChatRequest):
meta = redis_service.create_session(payload.user_id, payload.message)
session_id = meta["session_id"]
redis_service.save_chat(payload.user_id, session_id, {"role": "user", "content": payload.message})
redis_service.save_chat(payload.user_id, session_id,
{"role": "user", "content": payload.message})
history_to_send = await prepare_history(payload.user_id, session_id)
#model_to_use = payload.model_name or settings.MODEL_NAME
# Recupera modello dalla sessione
session_meta = redis_service.get_session_meta(payload.user_id, session_id)
model_to_use = payload.model_name or session_meta.get("model_name") or settings.MODEL_NAME
start = time.perf_counter()
async with httpx.AsyncClient(timeout=settings.REQUEST_TIMEOUT) as client:
resp = await client.post(
settings.LM_STUDIO_URL,
@@ -37,9 +39,17 @@ async def chat_endpoint(payload: ChatRequest):
)
resp.raise_for_status()
data = resp.json()
elapsed = time.perf_counter() - start
# Traccia la chiamata
try:
track_lm_call(model_to_use, elapsed)
except Exception as e:
logger.exception(f"Errore tracciamento LM Studio: {e}")
reply = data["choices"][0]["message"]["content"]
redis_service.save_chat(payload.user_id, session_id, {"role": "assistant", "content": reply})
redis_service.save_chat(payload.user_id, session_id,
{"role": "assistant", "content": reply})
return ChatResponse(response=reply, session_id=session_id)
@@ -55,15 +65,17 @@ async def chat_stream_endpoint(payload: ChatRequest):
meta = redis_service.create_session(payload.user_id, payload.message)
session_id = meta["session_id"]
redis_service.save_chat(payload.user_id, session_id, {"role": "user", "content": payload.message})
redis_service.save_chat(payload.user_id, session_id,
{"role": "user", "content": payload.message})
history_to_send = await prepare_history(payload.user_id, session_id)
#model_to_use = payload.model_name or settings.MODEL_NAME
# Recupera modello dalla sessione
session_meta = redis_service.get_session_meta(payload.user_id, session_id)
model_to_use = payload.model_name or session_meta.get("model_name") or settings.MODEL_NAME
async def event_generator():
assistant_text = ""
start = time.perf_counter()
try:
async with httpx.AsyncClient(timeout=None) as client:
async with client.stream(
@@ -103,15 +115,24 @@ async def chat_stream_endpoint(payload: ChatRequest):
logger.exception("Streaming error in /chat-stream")
yield f"event: error\ndata: {str(e)}\n\n"
finally:
elapsed = time.perf_counter() - start
if assistant_text:
redis_service.save_chat(payload.user_id, session_id, {"role": "assistant", "content": assistant_text})
redis_service.save_chat(payload.user_id, session_id,
{"role": "assistant", "content": assistant_text})
# Traccia la chiamata anche per lo stream
try:
track_lm_call(model_to_use, elapsed)
except Exception as e:
logger.exception(f"Errore tracciamento LM Studio: {e}")
headers = {
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no"
}
return StreamingResponse(event_generator(), media_type="text/event-stream", headers=headers)
return StreamingResponse(event_generator(),
media_type="text/event-stream",
headers=headers)
@router.get("/history")
@@ -134,3 +155,11 @@ async def delete_history(
redis_service.clear_chat(user_id, session_id)
return {"status": "cleared"}
# 📊 Endpoint per statistiche LM Studio
@router.get("/lm-stats")
async def lm_stats_endpoint():
"""
Restituisce le statistiche di utilizzo di LM Studio raccolte in Redis.
"""
return get_lm_stats()
-144
View File
@@ -1,144 +0,0 @@
# api/v1/chat.py
import httpx
import json
import asyncio
from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import StreamingResponse
from typing import List, Dict, Any, Optional
from models.chat import ChatRequest, ChatResponse
from services import redis_service # now using updated service with session support
from utils.logging import logger
from config import settings
# Router for the chat API; the endpoints below attach to it via decorators.
router = APIRouter()
# Max number of messages fetched from Redis and forwarded to the model per call.
#MAX_HISTORY_LENGTH = 50
MAX_HISTORY_LENGTH = 20
@router.post("/chat", response_model=ChatResponse)
async def chat_endpoint(payload: ChatRequest):
    """Handle one non-streaming chat turn.

    Ensures a session exists, stores the user message, forwards the recent
    history to LM Studio, stores the assistant reply, and returns it.
    Any failure is logged and surfaced to the client as a generic HTTP 500.
    """
    try:
        # Reuse the caller's session, or open a fresh one for this user.
        active_session = payload.session_id
        if not active_session:
            created = redis_service.create_session(payload.user_id, payload.message)
            active_session = created["session_id"]

        # Persist the incoming user turn before calling the model.
        redis_service.save_chat(
            payload.user_id,
            active_session,
            {"role": "user", "content": payload.message},
        )
        history = redis_service.get_chat(
            payload.user_id, active_session, limit=MAX_HISTORY_LENGTH
        )

        # Non-streaming completion request to LM Studio.
        async with httpx.AsyncClient(timeout=settings.REQUEST_TIMEOUT) as client:
            resp = await client.post(
                settings.LM_STUDIO_URL,
                json={"model": settings.MODEL_NAME, "messages": history},
            )
            resp.raise_for_status()
            data = resp.json()

        reply = data["choices"][0]["message"]["content"]

        # Persist the assistant turn so the next call sees the full history.
        redis_service.save_chat(
            payload.user_id,
            active_session,
            {"role": "assistant", "content": reply},
        )
        return ChatResponse(response=reply, session_id=active_session)
    except Exception:
        # Boundary handler: log the full traceback, hide details from clients.
        logger.exception("Error in /chat endpoint")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/chat-stream")
async def chat_stream_endpoint(payload: ChatRequest):
    """
    Streams model output token-by-token using SSE.
    """
    # Ensure there is a session to attach this conversation turn to.
    sid = payload.session_id
    if not sid:
        new_meta = redis_service.create_session(payload.user_id, payload.message)
        sid = new_meta["session_id"]
    redis_service.save_chat(payload.user_id, sid, {"role": "user", "content": payload.message})
    history = redis_service.get_chat(payload.user_id, sid, limit=MAX_HISTORY_LENGTH)

    async def event_generator():
        # Collected token pieces; joined and persisted once the stream ends.
        collected = []
        try:
            async with httpx.AsyncClient(timeout=None) as client:
                async with client.stream(
                    "POST",
                    settings.LM_STUDIO_URL,
                    json={
                        "model": settings.MODEL_NAME,
                        "messages": history,
                        "stream": True
                    }
                ) as upstream:
                    async for raw_line in upstream.aiter_lines():
                        if not raw_line:
                            continue
                        # Normalize upstream lines to SSE "data: ..." framing.
                        framed = raw_line if raw_line.startswith("data:") else f"data: {raw_line}"
                        chunk = framed[len("data: "):].strip()
                        if chunk == "[DONE]":
                            yield "data: [DONE]\n\n"
                            break
                        yield f"data: {chunk}\n\n"
                        # Best-effort extraction of the token text for history.
                        try:
                            parsed = json.loads(chunk)
                            first_choice = parsed.get("choices", [{}])[0]
                            token = first_choice.get("delta", {}).get("content") or first_choice.get("text")
                            if token:
                                collected.append(token)
                        except json.JSONDecodeError:
                            pass  # non-JSON lines are forwarded but not stored
                        await asyncio.sleep(0)  # yield control to the event loop
        except Exception as e:
            logger.exception("Streaming error in /chat-stream")
            yield f"event: error\ndata: {str(e)}\n\n"
        finally:
            assistant_text = "".join(collected)
            if assistant_text:
                redis_service.save_chat(payload.user_id, sid, {"role": "assistant", "content": assistant_text})

    headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no"
    }
    return StreamingResponse(event_generator(), media_type="text/event-stream", headers=headers)
@router.get("/history")
async def get_history(
    user_id: str = Query(..., description="User ID"),
    session_id: str = Query(..., description="Session ID"),
    limit: int = Query(MAX_HISTORY_LENGTH, description="Max number of messages to return")
) -> List[Dict[str, Any]]:
    """
    Return all history saved for a given user/session.
    """
    logger.info(f"[GET /history] user_id={user_id}, session_id={session_id}, limit={limit}")
    stored = redis_service.get_chat(user_id, session_id, limit=limit)
    # Normalize a missing/empty history to an empty list for the client.
    if not stored:
        return []
    return stored
@router.delete("/history")
async def delete_history(
    user_id: str = Query(..., description="User ID"),
    session_id: str = Query(..., description="Session ID")
):
    """
    Clears history for a given user/session.
    """
    logger.info(f"[DELETE /history] user_id={user_id}, session_id={session_id}")
    redis_service.clear_chat(user_id, session_id)
    # Simple acknowledgement payload for the client.
    outcome = {"status": "cleared"}
    return outcome
+74 -9
View File
@@ -1,16 +1,85 @@
# services/history_manager.py
import time
import httpx
from config import settings
from services import redis_service
MAX_HISTORY_TURNS = 10 # ultimi turni da mantenere
SUMMARY_TRIGGER_TURNS = 20 # soglia per fare summarization
MAX_HISTORY_TURNS = 10
SUMMARY_TRIGGER_TURNS = 20
# -------------------------
# STATISTICHE LM STUDIO
# -------------------------
# services/history_manager.py
def track_lm_call(model_name: str, elapsed_seconds: float):
    """Record one LM Studio call in Redis, both globally and per model.

    NOTE(review): the 3600s/86400s expirations are re-armed on every call,
    so the "last_hour"/"last_24h" counters only reset after a fully idle
    window — they approximate, not implement, a rolling window. Confirm
    whether that is the intended semantics before relying on them.
    """
    safe_key = model_name.replace(" ", "_")  # avoid spaces inside Redis key names
    pipe = redis_service.r.pipeline()

    def bump(counter: str, ttl: int) -> None:
        # Increment a windowed counter and (re)arm its expiration.
        pipe.incr(counter)
        pipe.expire(counter, ttl)

    # --- Global counters ---
    pipe.incr("lm:calls:total")
    bump("lm:calls:last_hour", 3600)
    bump("lm:calls:last_24h", 86400)
    pipe.incrbyfloat("lm:processing_time:total", elapsed_seconds)

    # --- Per-model counters ---
    pipe.sadd("lm:models:loaded", model_name)
    bump(f"lm:model:{safe_key}:calls:last_hour", 3600)
    bump(f"lm:model:{safe_key}:calls:last_24h", 86400)
    pipe.incrbyfloat(f"lm:model:{safe_key}:time:total", elapsed_seconds)

    pipe.execute()
def get_lm_stats():
    """Return global and per-model LM Studio usage statistics from Redis.

    NOTE(review): ``avg_time_sec`` divides the all-time total processing
    time by the last-24h call count, so it is only a rough estimate; a
    per-model total-call counter would be needed for a true average.
    """
    r = redis_service.r

    def read_int(key: str) -> int:
        return int(r.get(key) or 0)

    def read_float(key: str) -> float:
        return float(r.get(key) or 0.0)

    per_model = []
    for name in r.smembers("lm:models:loaded"):
        key = name.replace(" ", "_")  # must mirror the key scheme in track_lm_call
        hour_calls = read_int(f"lm:model:{key}:calls:last_hour")
        day_calls = read_int(f"lm:model:{key}:calls:last_24h")
        total_sec = read_float(f"lm:model:{key}:time:total")
        per_model.append({
            "model": name,
            "calls_last_hour": hour_calls,
            "calls_last_24h": day_calls,
            "total_time_sec": total_sec,
            "avg_time_sec": (total_sec / day_calls) if day_calls > 0 else 0.0,
        })

    return {
        "global": {
            "calls_total": read_int("lm:calls:total"),
            "calls_last_hour": read_int("lm:calls:last_hour"),
            "calls_last_24h": read_int("lm:calls:last_24h"),
            "total_processing_time_sec": read_float("lm:processing_time:total"),
        },
        "per_model": per_model,
    }
# -------------------------
# HISTORY
# -------------------------
async def summarize_messages(messages):
"""
Usa LM Studio per riassumere i messaggi in forma compatta (condensed history).
"""
prompt = [
{"role": "system", "content": "Riassumi la seguente conversazione in forma di elenco puntato, mantenendo solo i fatti chiave e le informazioni rilevanti per continuare il dialogo."},
*messages
@@ -26,10 +95,6 @@ async def summarize_messages(messages):
async def prepare_history(user_id: str, session_id: str):
"""
Recupera la history da Redis, applica windowing e summarization se necessario,
e restituisce la lista di messaggi da passare al modello.
"""
full_history = redis_service.get_chat(user_id, session_id, limit=1000)
if len(full_history) > SUMMARY_TRIGGER_TURNS:
-193
View File
@@ -1,193 +0,0 @@
// src/AssistantMessage.jsx
import React, { useState, useEffect } from "react";
import ReactMarkdown from "react-markdown";
import remarkGfm from "remark-gfm";
import remarkMath from "remark-math";
import rehypeKatex from "rehype-katex";
// Prism.js per syntax highlight
import Prism from "prismjs";
import "prismjs/themes/prism.css";
// Linguaggi base
import "prismjs/components/prism-sql";
import "prismjs/components/prism-javascript";
import "prismjs/components/prism-css";
import "prismjs/components/prism-json";
import "prismjs/components/prism-markdown";
import "prismjs/components/prism-csharp";
import "prismjs/components/prism-lua";
import "prismjs/components/prism-c";
import "prismjs/components/prism-cpp";
import "prismjs/components/prism-python";
import "prismjs/components/prism-basic";
import "prismjs/components/prism-javascript";
// Aggiungo alias "vb" che punta a "vbnet"
Prism.languages.vb = Prism.languages.vbnet;
// Renders one assistant chat bubble: extracts the model's <think>…</think>
// reasoning, shows it while the reply is still streaming, fades it out once
// the reply is final, and renders the remaining content as Markdown
// (GFM tables + math via KaTeX + highlighted code blocks).
export default function AssistantMessage({
content,
theme,
timestamp,
startedAt,
endedAt,
isFinal
}) {
const [showThink, setShowThink] = useState(false);
const [fadeOut, setFadeOut] = useState(false);
// Best available timestamp: explicit one, else stream end, else stream start.
const ts = timestamp ?? endedAt ?? startedAt;
// Pull out the first <think>…</think> section, if any.
const thinkMatch = content?.match(/<think>([\s\S]*?)<\/think>/i);
const thinkContent = thinkMatch ? thinkMatch[1].trim() : null;
// A message is "complete" when flagged final or any end timestamp exists.
const isComplete = isFinal || Boolean(timestamp || endedAt);
// Once complete, strip the think block from the rendered answer text.
const visibleContent = isComplete
? content?.replace(/<think>[\s\S]*?<\/think>/i, "").trim()
: content;
// Show the think block during streaming; fade it out 600ms after completion.
useEffect(() => {
if (thinkContent && !isComplete) {
setShowThink(true);
setFadeOut(false);
}
if (thinkContent && isComplete) {
setFadeOut(true);
const timer = setTimeout(() => setShowThink(false), 600);
return () => clearTimeout(timer);
}
}, [thinkContent, isComplete]);
return (
<div className="mb-2 text-start">
<div
className={`d-inline-block p-2 rounded ${theme.assistantBg}`}
style={{ maxWidth: "75%" }}
>
{showThink && (
<div
className={`think-block${fadeOut ? " fade-out" : ""}`}
style={{
fontStyle: "italic",
opacity: 0.7,
marginBottom: "0.5rem",
whiteSpace: "pre-wrap"
}}
>
🤔 {thinkContent}
</div>
)}
<ReactMarkdown
remarkPlugins={[remarkGfm, remarkMath]}
rehypePlugins={[rehypeKatex]}
components={{
table: (props) => (
<table className="table table-sm table-bordered" {...props} />
),
th: (props) => <th className="bg-light" {...props} />,
code: CodeWithCopy
}}
>
{visibleContent}
</ReactMarkdown>
</div>
{ts != null && (
<div
style={{
fontSize: "0.75rem",
color: "#666",
marginTop: "0.2rem",
marginLeft: "0.25rem"
}}
>
{formatDateTime(ts)}
</div>
)}
</div>
);
}
// Markdown `code` renderer: inline code passes through untouched; fenced
// blocks (className "language-…") get Prism syntax highlighting plus a
// copy-to-clipboard button with transient "Copied!" feedback.
function CodeWithCopy({ inline, className = "", children, ...props }) {
const [copied, setCopied] = useState(false);
const codeText = String(children).replace(/\n$/, "");
// Fenced block: not inline and carries a "language-…" class from Markdown.
const isFencedBlock = !inline && /^language-/.test(className);
// Syntax highlighting with Prism (re-runs whenever the code text changes).
useEffect(() => {
if (isFencedBlock) {
Prism.highlightAll();
}
}, [codeText, isFencedBlock]);
if (!isFencedBlock) {
return (
<code className={className} {...props}>
{children}
</code>
);
}
// Copy the raw code text; feedback label clears itself after 1.5s.
const handleCopy = async () => {
try {
await navigator.clipboard.writeText(codeText);
setCopied(true);
setTimeout(() => setCopied(false), 1500);
} catch (err) {
console.error("Copy failed", err);
}
};
return (
<div style={{ position: "relative" }}>
<pre className={className} {...props} style={{ paddingRight: "2rem" }}>
<code className={className}>{codeText}</code>
</pre>
<button
onClick={handleCopy}
style={{
position: "absolute",
top: "0.25rem",
right: "0.25rem",
border: "none",
background: "transparent",
cursor: "pointer"
}}
className="btn btn-copy shadow"
title="Copy to clipboard"
>
📋
</button>
{copied && (
<span
style={{
position: "absolute",
top: "0.25rem",
right: "2rem",
fontSize: "0.8rem",
color: "green"
}}
>
Copied!
</span>
)}
</div>
);
}
// Format a timestamp (Date, ISO string, or epoch millis) for display in the
// user's locale; non-parseable values are returned as their string form.
function formatDateTime(dateTime) {
  let parsed;
  if (dateTime instanceof Date) {
    parsed = dateTime;
  } else {
    parsed = new Date(dateTime);
  }
  if (Number.isNaN(parsed.getTime())) {
    return String(dateTime);
  }
  const displayOptions = {
    year: "numeric",
    month: "short",
    day: "numeric",
    hour: "2-digit",
    minute: "2-digit",
    second: "2-digit"
  };
  return parsed.toLocaleString(undefined, displayOptions);
}
+2 -2
View File
@@ -73,9 +73,9 @@ export default function ChatHeader({
{/* Center column: titolo solo se c'è sessione */}
<div className="col-5 text-center">
<h4 className="mb-0" title={`Default model: ${defaultModelName}`}>
🤖 EgalWare&apos;s LLM ChatBot
🤖 EgalWare&apos;s ChatBot
<button className="ms-2 btn btn-sm btn-outline-info" onClick={onToggleModels}>
📊 Modelli
📊 LLM Models
</button>
</h4>
</div>
+45 -45
View File
@@ -70,56 +70,56 @@ export default function ChatLayout({
onToggleModels={toggleModelsDetail}
sessionId={sessionId}
sessionName={sessionName}
defaultModelName={defaultModelName}
defaultModelName={defaultModelName}
/>
{showModelPage ? (
<ModelOverview onBackToChat={() => setShowModelPage(false)} />
) : (
/* AREA SCROLLABILE */
<div className="flex-grow-1 overflow-auto" style={{ minHeight: 0 }}>
<ChatWindow messages={messages} loading={loading} theme={theme} />
</div>
)}
{/* INPUT SEMPRE IN BASSO */}
{!showModelPage && (
sessionId ? (
<ChatInput
onSend={(message, modelName) => onSend(message, modelName)}
onStop={onStop}
loading={loading}
sessionModelName={sessionModelName || ""}
/>
) : (
<NoSessionBox onCreateSession={onCreateSession} />
)
)}
<div className="flex-grow-1 overflow-auto" style={{ minHeight: 0 }}>
{showModelPage ? (
<ModelOverview onBackToChat={() => setShowModelPage(false)} />
) : (
/* AREA SCROLLABILE */
<ChatWindow messages={messages} loading={loading} theme={theme} />
)}
</div>
{/* PANEL SESSIONI */}
<div
className={`offcanvas offcanvas-start ${showSessionsPanel ? "show" : ""}`}
tabIndex="-1"
style={{ visibility: showSessionsPanel ? "visible" : "hidden" }}
>
<div className="offcanvas-header">
<h5 className="offcanvas-title">Manage Sessions</h5>
<button
type="button"
className="btn-close text-reset"
onClick={closeSessionManager}
></button>
</div>
<div className="offcanvas-body">
<SessionTable
userId={userId}
onSelectSession={onSelectSession}
onClosePanel={closeSessionManager}
/>
</div>
{/* INPUT SEMPRE IN BASSO */}
{!showModelPage && (
sessionId ? (
<ChatInput
onSend={(message, modelName) => onSend(message, modelName)}
onStop={onStop}
loading={loading}
sessionModelName={sessionModelName || ""}
/>
) : (
<NoSessionBox onCreateSession={onCreateSession} />
)
)}
</div>
{/* PANEL SESSIONI */}
<div
className={`offcanvas offcanvas-start ${showSessionsPanel ? "show" : ""}`}
tabIndex="-1"
style={{ visibility: showSessionsPanel ? "visible" : "hidden" }}
>
<div className="offcanvas-header">
<h5 className="offcanvas-title">Manage Sessions</h5>
<button
type="button"
className="btn-close text-reset"
onClick={closeSessionManager}
></button>
</div>
</>
<div className="offcanvas-body">
<SessionTable
userId={userId}
onSelectSession={onSelectSession}
onClosePanel={closeSessionManager}
/>
</div>
</div>
</>
);
}
+101
View File
@@ -0,0 +1,101 @@
// src/LmStats.jsx
import React, { useEffect, useState } from "react";
// Fetches LM Studio usage statistics from GET /v1/lm-stats once on mount and
// renders them as two Bootstrap tables: global totals and per-model figures.
// Shows loading / error / empty states while the data is unavailable.
export default function LmStats() {
const [stats, setStats] = useState(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState(null);
// Load once on mount; errors are surfaced in the UI via `error`.
useEffect(() => {
async function loadStats() {
try {
const res = await fetch("/v1/lm-stats"); // adjust if the API lives on a different origin
if (!res.ok) throw new Error(`HTTP ${res.status}`);
const data = await res.json();
setStats(data);
} catch (err) {
console.error("Errore caricamento stats:", err);
setError(err.message);
} finally {
setLoading(false);
}
}
loadStats();
}, []);
if (loading) {
return <div className="alert alert-info">Caricamento statistiche...</div>;
}
if (error) {
return <div className="alert alert-danger">Errore: {error}</div>;
}
if (!stats) {
return <div className="alert alert-warning">Nessun dato disponibile</div>;
}
return (
<div className="container my-4">
<h2 className="mb-4">📊 Statistiche LM Studio</h2>
{/* Global statistics */}
<div className="card mb-4">
<div className="card-header bg-primary text-white">
🌍 Globali
</div>
<div className="card-body p-0">
<table className="table table-striped mb-0">
<thead className="table-light">
<tr>
<th>Chiamate totali</th>
<th>Ultima ora</th>
<th>Ultime 24h</th>
<th>Tempo totale (s)</th>
</tr>
</thead>
<tbody>
<tr>
<td>{stats.global.calls_total}</td>
<td>{stats.global.calls_last_hour}</td>
<td>{stats.global.calls_last_24h}</td>
<td>{stats.global.total_processing_time_sec.toFixed(2)}</td>
</tr>
</tbody>
</table>
</div>
</div>
{/* Per-model statistics */}
<div className="card">
<div className="card-header bg-success text-white">
🧠 Per Modello
</div>
<div className="card-body p-0">
<table className="table table-hover mb-0">
<thead className="table-light">
<tr>
<th>Modello</th>
<th>Ultima ora</th>
<th>Ultime 24h</th>
<th>Tempo totale (s)</th>
<th>Tempo medio (s)</th>
</tr>
</thead>
<tbody>
{stats.per_model.map((m) => (
<tr key={m.model}>
<td>{m.model}</td>
<td>{m.calls_last_hour}</td>
<td>{m.calls_last_24h}</td>
<td>{m.total_time_sec.toFixed(2)}</td>
<td>{m.avg_time_sec.toFixed(2)}</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
</div>
);
}
+121 -63
View File
@@ -1,12 +1,25 @@
// src/ModelOverview.jsx
import React, { useEffect, useState } from "react";
import LmStats from "./LmStats";
export default function ModelOverview({ onBackToChat }) {
const [defaultModel, setDefaultModel] = useState("");
const [modelInfo, setModelInfo] = useState([]);
const [searchTerm, setSearchTerm] = useState("");
const [loading, setLoading] = useState(false);
const [showStats, setShowStats] = useState(false);
const [sortColumn, setSortColumn] = useState("name");
const [sortDirection, setSortDirection] = useState("asc");
const handleSort = (column) => {
if (sortColumn === column) {
setSortDirection((prev) => (prev === "asc" ? "desc" : "asc"));
} else {
setSortColumn(column);
setSortDirection("asc");
}
};
const fetchModelData = async () => {
setLoading(true);
@@ -45,77 +58,122 @@ export default function ModelOverview({ onBackToChat }) {
fetchModelData();
}, []);
const filteredModels = modelInfo.filter((model) =>
const filteredModels = modelInfo
.filter((model) =>
model.name.toLowerCase().includes(searchTerm.toLowerCase())
);
)
.sort((a, b) => {
const valA = a[sortColumn] || "";
const valB = b[sortColumn] || "";
if (typeof valA === "string" && typeof valB === "string") {
return sortDirection === "asc"
? valA.localeCompare(valB)
: valB.localeCompare(valA);
}
return 0;
});
return (
<div className="container py-4">
{/* Jumbotron con layout a due colonne */}
<div className="bg-light p-4 rounded shadow-sm mb-4">
<h3 className="mb-0">📌 Modello di default</h3>
<p className="lead text-muted mb-0">
{defaultModel || "Non disponibile"}
</p>
<div className="row align-items-center">
{/* Sinistra: modello di default */}
<div className="col-6 col-md-4">
<h3 className="mb-2">📌 Modello di default</h3>
<p className="lead text-muted border border-info border-3 rounded-3 mb-0 text-center shadow">
{defaultModel || "Non disponibile"}
</p>
</div>
{/* Destra: pulsanti su due righe */}
<div className="col-6 col-md-8 text-md-end mt-3 mt-md-0">
<div className="d-grid gap-2 d-md-flex justify-content-md-end mb-2">
<button
className={`btn ${showStats ? "btn-secondary" : "btn-outline-secondary"}`}
onClick={() => setShowStats(prev => !prev)}
>
{showStats ? "📋 Modelli Disponibili" : "📊 Statistiche Uso"}
</button>
{/* </div>
<div className="d-grid gap-2 d-md-flex justify-content-md-end"> */}
<button className="btn btn-outline-primary" onClick={onBackToChat}>
Torna alla chat
</button>
</div>
</div>
</div>
</div>
<div className="d-flex justify-content-between mb-3">
<button className="btn btn-outline-primary" onClick={onBackToChat}>
Torna alla chat
</button>
<button className="btn btn-outline-success" onClick={updateModelData}>
🔄 Aggiorna dati modelli
</button>
</div>
{/* Contenuto principale */}
{showStats ? (
<LmStats />
) : (
<>
{/* InputGroup: cerca + aggiorna */}
<div className="mb-3">
<div className="input-group">
<input
type="text"
className="form-control"
placeholder="🔍 Cerca modello per nome..."
value={searchTerm}
onChange={(e) => setSearchTerm(e.target.value)}
/>
<button
className="btn btn-outline-success"
type="button"
onClick={updateModelData}
>
🔄 Aggiorna info
</button>
</div>
</div>
<div className="mb-3">
<input
type="text"
className="form-control"
placeholder="🔍 Cerca modello per nome..."
value={searchTerm}
onChange={(e) => setSearchTerm(e.target.value)}
/>
</div>
<div className="table-responsive">
<table className="table table-bordered table-hover align-middle">
<thead className="table-light">
<tr>
<th>Nome</th>
<th>Descrizione</th>
{/*<th>Anno</th>*/}
<th>Caratteristiche</th>
</tr>
</thead>
<tbody>
{loading && (
<tr>
<td colSpan="4" className="text-center">Caricamento</td>
</tr>
)}
{!loading && filteredModels.length === 0 && (
<tr>
<td colSpan="4" className="text-center">Nessun modello trovato</td>
</tr>
)}
{!loading && filteredModels.map((model) => (
<tr key={model.name}>
<td>{model.name}</td>
<td>{model.description || "—"}</td>
{/*<td>{model.year || "—"}</td>*/}
<td>
<ul className="mb-0 small">
{Array.isArray(model.features) && model.features.length > 0
? model.features.map((f, i) => <li key={i}>{f}</li>)
: <li></li>}
</ul>
</td>
</tr>
))}
</tbody>
</table>
</div>
<div className="table-responsive">
<table className="table table-bordered table-hover align-middle">
<thead className="table-light">
<tr>
<th onClick={() => handleSort("name")} style={{ cursor: "pointer" }}>
Nome {sortColumn === "name" && (sortDirection === "asc" ? "⬆" : "⬇")}
</th>
<th onClick={() => handleSort("description")} style={{ cursor: "pointer" }}>
Descrizione {sortColumn === "description" && (sortDirection === "asc" ? "⬆" : "⬇")}
</th>
<th onClick={() => handleSort("features")} style={{ cursor: "pointer" }}>
Caratteristiche {sortColumn === "features" && (sortDirection === "asc" ? "⬆" : "⬇")}
</th>
</tr>
</thead>
<tbody>
{loading && (
<tr>
<td colSpan="3" className="text-center">Caricamento</td>
</tr>
)}
{!loading && filteredModels.length === 0 && (
<tr>
<td colSpan="3" className="text-center">Nessun modello trovato</td>
</tr>
)}
{!loading && filteredModels.map((model) => (
<tr key={model.name}>
<td>{model.name}</td>
<td>{model.description || "—"}</td>
<td>
<ul className="mb-0 small">
{Array.isArray(model.features) && model.features.length > 0
? model.features.map((f, i) => <li key={i}>{f}</li>)
: <li></li>}
</ul>
</td>
</tr>
))}
</tbody>
</table>
</div>
</>
)}
</div>
);
}
-106
View File
@@ -1,106 +0,0 @@
// src/ModelOverview.jsx
import React, { useEffect, useState } from "react";
// Model-overview page: shows the default model name and a table of all known
// models (name / description / year / features). Data comes from
// GET /v1/default-model (plain text) and GET /v1/models-info (JSON); the
// refresh button POSTs /v1/models-info/update and re-renders the result.
export default function ModelOverview({ onBackToChat }) {
const [defaultModel, setDefaultModel] = useState("");
const [modelInfo, setModelInfo] = useState([]);
const [loading, setLoading] = useState(false);
// Fetch the default model and the model list in parallel.
const fetchModelData = async () => {
setLoading(true);
try {
const [defaultRes, infoRes] = await Promise.all([
fetch("/v1/default-model"),
fetch("/v1/models-info")
]);
const defaultText = await defaultRes.text();
const infoJson = await infoRes.json();
setDefaultModel(defaultText);
setModelInfo(infoJson);
} catch (err) {
console.error("Errore nel caricamento modelli", err);
} finally {
setLoading(false);
}
};
// Ask the backend to refresh model metadata, then show the updated list.
const updateModelData = async () => {
setLoading(true);
try {
const res = await fetch("/v1/models-info/update", { method: "POST" });
if (res.ok) {
const updated = await res.json();
setModelInfo(updated);
}
} catch (err) {
console.error("Errore nell'aggiornamento modelli", err);
} finally {
setLoading(false);
}
};
// Initial load on mount.
useEffect(() => {
fetchModelData();
}, []);
return (
<div className="container py-4">
<div className="bg-light p-4 rounded shadow-sm mb-4">
<h3 className="mb-0">📌 Modello di default</h3>
<p className="lead text-muted mb-0">
{defaultModel || "Non disponibile"}
</p>
</div>
<div className="d-flex justify-content-between mb-3">
<button className="btn btn-outline-primary" onClick={onBackToChat}>
⬅ Torna alla chat
</button>
<button className="btn btn-outline-success" onClick={updateModelData}>
🔄 Aggiorna dati modelli
</button>
</div>
<div className="table-responsive">
<table className="table table-bordered table-hover align-middle">
<thead className="table-light">
<tr>
<th>Nome</th>
<th>Descrizione</th>
<th>Anno</th>
<th>Caratteristiche</th>
</tr>
</thead>
<tbody>
{loading && (
<tr>
<td colSpan="4" className="text-center">Caricamento…</td>
</tr>
)}
{!loading && modelInfo.length === 0 && (
<tr>
<td colSpan="4" className="text-center">Nessun modello disponibile</td>
</tr>
)}
{!loading && modelInfo.map((model) => (
<tr key={model.name}>
<td>{model.name}</td>
<td>{model.description || "—"}</td>
<td>{model.year || "—"}</td>
<td>
<ul className="mb-0 small">
{model.features?.map((f, i) => (
<li key={i}>{f}</li>
)) || <li>—</li>}
</ul>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
);
}