Merge branch 'release/ChatRestoreSession_01'

This commit is contained in:
Samuele Locatelli
2025-08-22 15:19:28 +02:00
20 changed files with 1953 additions and 144 deletions
+71 -16
View File
@@ -1,10 +1,12 @@
# main.py
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
import requests
from fastapi.responses import StreamingResponse
import redis
import json
import httpx
r = redis.Redis(host='localhost', port=6379, db=0)
r = redis.Redis(host='localhost', port=6379, db=1)
def save_chat(user_id, message):
    """Append *message* (a role/content dict) to the user's Redis chat list.

    Messages are stored JSON-encoded under the list key ``chat:<user_id>``.
    """
    key = f"chat:{user_id}"
    r.rpush(key, json.dumps(message))
@@ -15,7 +17,6 @@ def get_chat(user_id):
app = FastAPI()
# Allow frontend access
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
@@ -23,8 +24,8 @@ app.add_middleware(
allow_headers=["*"],
)
# Store sessions in memory (simple version)
sessions = {}
LM_STUDIO_URL = "http://10.74.83.100:1234/v1/chat/completions"
MODEL_NAME = "qwen/qwen3-4b-2507" # update as needed
@app.post("/chat")
async def chat(request: Request):
@@ -32,21 +33,75 @@ async def chat(request: Request):
user_id = data.get("user_id", "default")
message = data["message"]
# Retrieve session history
save_chat(user_id, {"role": "user", "content": message})
history = get_chat(user_id)
# Send to LM Studio
response = requests.post("http://10.74.83.100:1234/v1/chat/completions", json={
"model": "qwen/qwen3-4b-2507", # Replace with actual model name
"messages": history
})
result = response.json()
async with httpx.AsyncClient(timeout=None) as client:
resp = await client.post(LM_STUDIO_URL, json={
"model": MODEL_NAME,
"messages": history,
})
result = resp.json()
reply = result["choices"][0]["message"]["content"]
# save in REDIS chat history
save_chat(user_id, {"role": "assistant", "content": reply})
return {"response": reply}
@app.post("/chat-stream")
async def chat_stream(request: Request):
data = await request.json()
user_id = data.get("user_id", "default")
message = data["message"]
# Save user message and build history
save_chat(user_id, {"role": "user", "content": message})
history = get_chat(user_id)
async def event_generator():
assistant_text = ""
try:
async with httpx.AsyncClient(timeout=None) as client:
async with client.stream("POST", LM_STUDIO_URL, json={
"model": MODEL_NAME,
"messages": history,
"stream": True
}) as r:
async for raw_line in r.aiter_lines():
if not raw_line:
continue
# Normalize to standard SSE "data: ..." form
line = raw_line if raw_line.startswith("data:") else f"data: {raw_line}"
payload = line[len("data: "):].strip()
if payload == "[DONE]":
# Finalize and flush
yield "data: [DONE]\n\n"
break
# Echo the SSE line to client
yield f"data: {payload}\n\n"
# Accumulate content for saving to Redis
try:
obj = json.loads(payload)
choice = obj.get("choices", [{}])[0]
# Handle OpenAI-style streaming objects
delta = choice.get("delta", {})
piece = delta.get("content")
if piece is None:
# Some servers send "text" instead
piece = choice.get("text")
if piece:
assistant_text += piece
except Exception:
# Ignore non-JSON control lines
pass
finally:
if assistant_text:
save_chat(user_id, {"role": "assistant", "content": assistant_text})
headers = {
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no" # helps if behind Nginx
}
return StreamingResponse(event_generator(), media_type="text/event-stream", headers=headers)
+891
View File
File diff suppressed because it is too large Load Diff
+5
View File
@@ -2,9 +2,14 @@
"name": "lm-chat-frontend",
"version": "1.0.0",
"dependencies": {
"katex": "^0.16.22",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-markdown": "^10.1.0",
"react-syntax-highlighter": "^15.6.1",
"rehype-katex": "^7.0.1",
"remark-gfm": "^4.0.1",
"remark-math": "^6.0.0",
"vite": "^4.0.0"
},
"scripts": {
+41 -10
View File
@@ -1,8 +1,4 @@
/* src/App.css */
body {
font-family: sans-serif;
}
body, html {
margin: 0;
padding: 0;
@@ -21,25 +17,44 @@ body, html {
flex: 1;
overflow-y: auto;
padding: 1rem;
background-color: #f8f9fa; /* Bootstrap light gray */
overflow-anchor: none; /* prevent anchor jumps */
}
/* Wider bubbles — use Bootstrap breakpoints for responsiveness */
.message {
margin: 0.5rem 0;
padding: 0.75rem;
border-radius: 8px;
max-width: 80%;
padding: 0.9rem 1rem;
border-radius: 12px;
width: auto;
max-width: 95%; /* was 80% */
word-wrap: break-word;
}
.message:last-child {
overflow-anchor: auto; /* anchor here */
}
@media (min-width: 768px) {
.message {
max-width: 75%; /* keep some margin on larger screens */
}
}
.message.user {
background-color: #dbeafe;
background-color: #0d6efd; /* Bootstrap primary */
color: #fff;
align-self: flex-end;
text-align: right;
border-top-right-radius: 0; /* subtle speech bubble effect */
}
.message.assistant {
background-color: #e5e7eb;
background-color: #e9ecef; /* Bootstrap light gray */
color: #212529;
align-self: flex-start;
text-align: left;
border-top-left-radius: 0;
}
.input-bar {
@@ -60,10 +75,26 @@ body, html {
.send-button {
margin-left: 0.5rem;
padding: 0.75rem 1rem;
background-color: #3b82f6;
background-color: #0d6efd; /* Bootstrap primary */
color: white;
border: none;
border-radius: 8px;
cursor: pointer;
}
pre {
background-color: #212529; /* dark background for code */
color: #f8f9fa;
padding: 0.75rem;
border-radius: 0.375rem;
overflow-x: auto;
}
code {
font-family: 'Fira Code', monospace;
font-size: 0.9rem;
}
.btn-copy {
font-size: 0.75rem;
}
+49 -72
View File
@@ -1,81 +1,58 @@
import React, { useState, useRef, useEffect } from 'react';
import ReactMarkdown from 'react-markdown';
import './App.css';
//App.jsx
import React, { useState, useEffect } from "react";
import "./App.css";
import { themes } from "./themes";
import ChatLayout from "./ChatLayout";
import { useChatStream } from "./useChatStream";
import { getSessionId, getUserId, resetSessionId } from "./useSessionId";
function App() {
const [messages, setMessages] = useState([]);
const [input, setInput] = useState("");
const messagesEndRef = useRef(null);
const [loading, setLoading] = useState(false);
const sendMessage = async () => {
if (!input.trim()) return;
const userMessage = { role: "user", content: input };
setMessages(prev => [...prev, userMessage]);
setInput("");
setLoading(true); // 🚀 show loader
try {
const res = await fetch("/chat", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ user_id: "user1", message: input })
});
const data = await res.json();
const botMessage = { role: "assistant", content: data.response };
setMessages(prev => [...prev, botMessage]);
} finally {
setLoading(false); // ✅ hide loader
}
};
export default function App() {
const { messages, loading, sendMessage, stopGenerating, setMessages } = useChatStream();
const [themeName, setThemeName] = useState("light");
const theme = themes[themeName];
const sessionId = getSessionId();
const userId = getUserId();
useEffect(() => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
}, [messages]);
const saved = localStorage.getItem("preferredTheme");
if (saved && themes[saved]) setThemeName(saved);
}, []);
useEffect(() => {
localStorage.setItem("preferredTheme", themeName);
}, [themeName]);
const toggleTheme = () => {
setThemeName((t) => (t === "light" ? "dark" : "light"));
};
const reloadHistory = async () => {
const res = await fetch(`/v1/history?user_id=${userId}&session_id=${sessionId}`);
const history = await res.json();
setMessages(history); // from useChatStream
};
const freshStart = async () => {
await fetch(`/v1/history?user_id=${userId}&session_id=${sessionId}`, { method: "DELETE" });
setMessages([]);
resetSessionId();
// or start a brand new sessionId:
//localStorage.removeItem("sessionId");
//window.location.reload();
};
return (
<div className="chat-container">
<header className="navbar navbar-dark bg-primary sticky-top">
<div className="container-fluid">
<span className="navbar-brand mb-0 h1">Egalware's LM Studio Chat</span>
</div>
</header>
<div className="chat-box">
{loading && (
<div className="d-flex justify-content-start p-2">
<div className="spinner-border text-secondary" role="status" style={{width: "1.5rem", height: "1.5rem"}}>
<span className="visually-hidden">Thinking...</span>
</div>
<span className="ms-2 text-muted">The model is processing...</span>
</div>
)}
{messages.map((msg, i) => (
<div key={i} className={`d-flex mb-2 ${msg.role === "user" ? "justify-content-end" : "justify-content-start"}`}>
<div className={`p-2 rounded shadow-sm ${msg.role === "user" ? "bg-primary text-white" : "bg-light text-dark border" }`} style={{ maxWidth: "75%" }}>
<ReactMarkdown>{msg.content}</ReactMarkdown>
</div>
</div>
))}
<div ref={messagesEndRef} />
</div>
<div className="input-bar d-flex justify-content-center p-3 bg-light border-top">
<div className="w-100 w-md-75 w-lg-50 d-flex">
<input className="form-control me-2"
value={input}
onChange={e => setInput(e.target.value)}
onKeyDown={e => e.key === "Enter" && sendMessage()}
placeholder="Type your message..."
autoFocus
/>
<button className="btn btn-primary" onClick={sendMessage}>Send</button>
</div>
</div>
</div>
<ChatLayout
theme={theme}
messages={messages}
loading={loading}
onSend={sendMessage}
onStop={stopGenerating}
onToggleTheme={toggleTheme}
onReloadHistory={reloadHistory}
onFreshStart={freshStart}
/>
);
}
export default App;
+177
View File
@@ -0,0 +1,177 @@
// src/App.jsx
import React, { useState, useRef, useEffect } from 'react';
import ReactMarkdown from 'react-markdown';
import remarkMath from 'remark-math';
import rehypeKatex from 'rehype-katex';
import MessageContent from './MessageContent';
import { useStreamBuffer } from './hooks/useStreamBuffer'
import './App.css';
import 'katex/dist/katex.min.css';
/**
 * Top-level chat UI: renders the transcript, streams assistant replies from
 * `/v1/chat-stream` over SSE, and throttles re-renders via useStreamBuffer.
 *
 * Two copies of the in-flight assistant reply are kept:
 *  - `acc` (local): the authoritative full text, committed to state at the end;
 *  - `buffered` (useStreamBuffer): a throttled copy used for smooth rendering.
 */
function App() {
  const [messages, setMessages] = useState([]);
  const [input, setInput] = useState("");
  const [loading, setLoading] = useState(false);
  const messagesEndRef = useRef(null);
  // Flush the streaming text to the UI at most every 80 ms.
  const { buffered, pushChunk, reset } = useStreamBuffer(80);

  const sendMessage = async () => {
    if (!input.trim() || loading) return;
    const userMessage = { role: "user", content: input };
    const userId = "user1";

    // Calculate where the assistant placeholder will land.
    // NOTE(review): these indices come from the render-time `messages`; a
    // concurrent state update would make them stale. The `loading` guard above
    // appears to enforce single-flight sends — confirm.
    const startIndex = messages.length;
    const assistantIndex = startIndex + 1;

    // Optimistic UI: user + empty assistant
    setMessages(prev => [...prev, userMessage, { role: "assistant", content: "" }]);
    setInput("");
    setLoading(true);
    reset(); // clear the buffer for the new response

    try {
      const res = await fetch("/v1/chat-stream", {
        method: "POST",
        headers: { "Content-Type": "application/json", "Accept": "text/event-stream" },
        body: JSON.stringify({ user_id: userId, message: userMessage.content })
      });

      // Non-streaming fallback
      if (!res.ok || !res.body) {
        const json = await res.json().catch(() => null);
        const text = json?.response ?? "Error: streaming not available.";
        setMessages(prev => {
          const next = [...prev];
          next[assistantIndex] = { role: "assistant", content: text };
          return next;
        });
        setLoading(false);
        return;
      }

      const reader = res.body.getReader();
      const decoder = new TextDecoder("utf-8");
      let acc = "";        // final committed text
      let sseBuffer = "";  // raw SSE buffer

      while (true) {
        const { value, done } = await reader.read();
        if (done) break;
        sseBuffer += decoder.decode(value, { stream: true });

        // Split on SSE event boundaries (events end with a blank line).
        const events = sseBuffer.split("\n\n");
        sseBuffer = events.pop() || "";

        for (const evt of events) {
          const lines = evt.split("\n").map(l => l.trim()).filter(Boolean);
          for (const line of lines) {
            if (!line.startsWith("data:")) continue;
            const data = line.slice(5).trim();
            if (data === "[DONE]") {
              // Commit final text and finish
              setMessages(prev => {
                const next = [...prev];
                next[assistantIndex] = { role: "assistant", content: acc };
                return next;
              });
              setLoading(false);
              return;
            }
            try {
              const obj = JSON.parse(data);
              const choice = obj?.choices?.[0] ?? {};
              const delta = choice.delta ?? {};
              const piece = delta.content ?? choice.text ?? "";
              if (piece) {
                acc += piece;      // reliable final copy
                pushChunk(piece);  // smooth UI copy
              }
            } catch {
              // ignore non-JSON control lines
            }
          }
        }
      }

      // Stream ended without an explicit [DONE]
      setMessages(prev => {
        const next = [...prev];
        next[assistantIndex] = { role: "assistant", content: acc };
        return next;
      });
    } catch (err) {
      setMessages(prev => {
        const next = [...prev];
        next[assistantIndex] = { role: "assistant", content: `Error: ${String(err)}` };
        return next;
      });
    } finally {
      setLoading(false);
    }
  };

  // Follow the stream: keep the newest text in view while a reply is loading.
  useEffect(() => {
    if (loading) {
      messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
    }
  }, [buffered, loading]);

  return (
    <div className="chat-container d-flex flex-column vh-100">
      <header className="navbar navbar-dark bg-primary sticky-top">
        <div className="container-fluid">
          <span className="navbar-brand mb-0 h1">Egalware&apos;s LM Studio Chat</span>
        </div>
      </header>
      <div className="chat-box container-fluid py-2 flex-grow-1 overflow-auto">
        {messages.map((msg, i) => {
          {/* The last assistant bubble renders the throttled stream buffer */}
          const isLastAssistant = i === messages.length - 1 && msg.role === "assistant" && loading;
          return (
            <div className="row mb-2" key={i}>
              <div className={`col-12 d-flex ${msg.role === "user" ? "justify-content-end" : "justify-content-start"}`}>
                <div className={`p-2 rounded-3 shadow-sm ${ msg.role === "user" ? "bg-primary bg-opacity-75 text-white" : "bg-light border text-dark" }`} style={{ width: "95%" }} >
                  <MessageContent content={isLastAssistant ? buffered : msg.content} />
                </div>
              </div>
            </div>
          );
        })}
        {loading && (
          <div className="row text-muted ps-3">
            <div className="d-flex align-items-center">
              <div className="spinner-border spinner-border-sm me-2" role="status" />
              The model is processing...
            </div>
          </div>
        )}
        <div ref={messagesEndRef} />
      </div>
      <div className="input-bar d-flex justify-content-center p-3 bg-light border-top">
        <div className="w-100 w-md-75 w-lg-50 d-flex">
          <input
            className="form-control me-2"
            value={input}
            onChange={e => setInput(e.target.value)}
            onKeyDown={e => e.key === "Enter" && sendMessage()}
            placeholder="Type your message..."
            disabled={loading}
            autoFocus
          />
          <button className="btn btn-primary" onClick={sendMessage} disabled={loading}>
            {loading ? "Sending..." : "Send"}
          </button>
        </div>
      </div>
    </div>
  );
}
export default App;
+31
View File
@@ -0,0 +1,31 @@
import React from "react";
import ReactMarkdown from "react-markdown";
import remarkGfm from "remark-gfm";
import remarkMath from "remark-math";
import rehypeKatex from "rehype-katex";
/**
 * Left-aligned assistant chat bubble rendering Markdown with GFM tables
 * and KaTeX math, styled by the active theme.
 */
export default function AssistantMessage({ content, theme }) {
  // Map Markdown table elements onto Bootstrap-styled equivalents.
  const mdComponents = {
    table: (props) => (
      <table className="table table-sm table-bordered" {...props} />
    ),
    th: (props) => <th className="bg-light" {...props} />,
  };

  return (
    <div className="mb-2 text-start">
      <div
        className={`d-inline-block p-2 rounded ${theme.assistantBg}`}
        style={{ maxWidth: "95%" }}
      >
        <ReactMarkdown
          remarkPlugins={[remarkGfm, remarkMath]}
          rehypePlugins={[rehypeKatex]}
          components={mdComponents}
        >
          {content}
        </ReactMarkdown>
      </div>
    </div>
  );
}
+48
View File
@@ -0,0 +1,48 @@
// ChatHeader.jsx
import React from "react";
/**
 * Sticky top bar: history actions (reload / fresh start) on the left, title
 * centered, theme toggle on the right. Purely presentational — all behavior
 * is supplied by the parent through callback props.
 */
export default function ChatHeader({
  theme,
  onToggleTheme,
  onReloadHistory,
  onFreshStart
}) {
  return (
    <header
      className={`${theme.headerBg} py-3 sticky-top shadow row align-items-center`}
    >
      {/* Left column: new buttons */}
      <div className="col-3 d-flex gap-2 justify-content-start">
        <button
          className="btn btn-sm btn-outline-light"
          onClick={onReloadHistory}
        >
          🔄 Reload
        </button>
        <button
          className="btn btn-sm btn-danger"
          onClick={onFreshStart}
        >
          🆕 Fresh Start
        </button>
      </div>
      {/* Center column: title */}
      <div className="col-6 text-center">
        <h4 className="mb-0">🤖 EgalWare&apos;s LLM ChatBot</h4>
      </div>
      {/* Right column: theme toggle */}
      <div className="col-3 text-end">
        <button
          className="btn btn-sm btn-outline-light"
          onClick={onToggleTheme}
        >
          Toggle Theme
        </button>
      </div>
    </header>
  );
}
+31
View File
@@ -0,0 +1,31 @@
import React, { useState } from "react";
/**
 * Message composer. Enter sends the draft via `onSend`; Shift+Enter inserts a
 * newline. Disabled while a response is streaming.
 */
export default function ChatInput({ onSend, loading }) {
  const [draft, setDraft] = useState("");

  const handleKeyDown = (e) => {
    // Shift+Enter keeps the default newline behavior.
    if (e.key !== "Enter" || e.shiftKey) return;
    e.preventDefault();
    if (!draft.trim()) return;
    onSend(draft);
    setDraft("");
  };

  return (
    <div className="p-2 border-top bg-white">
      <textarea
        className="form-control"
        rows={3}
        value={draft}
        onChange={(e) => setDraft(e.target.value)}
        onKeyDown={handleKeyDown}
        placeholder="Type a message... (Shift+Enter for newline)"
        disabled={loading}
      />
    </div>
  );
}
+38
View File
@@ -0,0 +1,38 @@
// ChatLayout.jsx
import React from "react";
import ChatHeader from "./ChatHeader";
import ChatWindow from "./ChatWindow";
import ChatInput from "./ChatInput";
/**
 * Overall page scaffold: header, scrollable chat window, an optional
 * "Stop Generating" control while a reply is streaming, and the input box.
 * All state and handlers are owned by the parent and passed down.
 */
export default function ChatLayout({
  theme,
  messages,
  loading,
  onSend,
  onStop,
  onToggleTheme,
  onReloadHistory,
  onFreshStart
}) {
  return (
    <div className={`mx-3 d-flex flex-column vh-100 ${theme.bodyBg}`}>
      <ChatHeader
        theme={theme}
        onToggleTheme={onToggleTheme}
        onReloadHistory={onReloadHistory}
        onFreshStart={onFreshStart}
      />
      <ChatWindow messages={messages} loading={loading} theme={theme} />
      {/* Allow cancelling an in-flight streamed reply */}
      {loading && (
        <div className="p-2 text-center">
          <button className="btn btn-warning btn-sm" onClick={onStop}>
            Stop Generating
          </button>
        </div>
      )}
      <ChatInput onSend={onSend} loading={loading} />
    </div>
  );
}
+31
View File
@@ -0,0 +1,31 @@
import React, { useRef, useEffect } from "react";
import UserMessage from "./UserMessage";
import AssistantMessage from "./AssistantMessage";
/**
 * Scrollable message list. Auto-scrolls to the newest message whenever the
 * transcript changes and shows a hint while a reply is streaming.
 */
export default function ChatWindow({ messages, loading, theme }) {
  const bottomRef = useRef(null);

  // Keep the latest message in view.
  useEffect(() => {
    bottomRef.current?.scrollIntoView({ behavior: "smooth" });
  }, [messages]);

  return (
    <main className={`flex-grow-1 overflow-auto p-3 ${theme.bodyBg}`}>
      {messages.map((msg, i) => {
        const Bubble = msg.role === "user" ? UserMessage : AssistantMessage;
        return <Bubble key={i} content={msg.content} theme={theme} />;
      })}
      {loading && (
        <div className="text-muted small fst-italic">
          The model is processing...
        </div>
      )}
      <div ref={bottomRef} />
    </main>
  );
}
+27
View File
@@ -0,0 +1,27 @@
/* MessageContent.css */

/* Markdown tables: compact, bordered, sized to their content. */
.table {
  width: auto;
  border-collapse: collapse;
}
.table th,
.table td {
  border: 1px solid #dee2e6;
  padding: 0.25rem 0.5rem;
  vertical-align: top;
}

/* Wrap long code instead of overflowing the chat bubble. */
code {
  white-space: pre-wrap;
  word-break: break-word;
}

/* Reserve height so streaming code blocks don't jump as they grow. */
.md-pre-wrapper {
  min-height: 5rem; /* adjust to taste */
}
.md-pre-wrapper pre,
.md-pre-wrapper > div { /* syntax highlighter wrapper */
  margin: 0;
  overflow: auto;
}
+72
View File
@@ -0,0 +1,72 @@
// MessageContent.jsx
import React, { useCallback } from 'react';
import ReactMarkdown from 'react-markdown';
import remarkMath from 'remark-math';
import remarkGfm from 'remark-gfm';
import rehypeKatex from 'rehype-katex';
import 'katex/dist/katex.min.css';
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
import { oneDark } from 'react-syntax-highlighter/dist/esm/styles/prism';
import './MessageContent.css';
/**
 * Render one chat message's Markdown: GFM tables, KaTeX math, and fenced
 * code blocks with syntax highlighting plus a copy-to-clipboard button.
 */
export default function MessageContent({ content }) {
  // Copy code to the clipboard; failures are silently ignored.
  const handleCopy = useCallback(async (text) => {
    try {
      await navigator.clipboard.writeText(text);
    } catch {
      /* you could add a toast here if desired */
    }
  }, []);

  return (
    <ReactMarkdown
      remarkPlugins={[remarkMath, remarkGfm]}
      rehypePlugins={[rehypeKatex]}
      components={{
        table: (props) => <table className="table table-bordered table-sm" {...props} />,
        thead: (props) => <thead className="table-light" {...props} />,
        // NOTE(review): react-markdown v10 may no longer pass `inline` to the
        // code renderer; the isMultiLine fallback below is what actually
        // distinguishes the cases — confirm against the installed version.
        code({ inline, className, children, ...props }) {
          const codeText = String(children).replace(/\n$/, '');
          const match = /language-(\w+)/.exec(className || '');
          const isMultiLine = codeText.includes('\n');
          if (inline || !isMultiLine) {
            // Inline or single-line — render plainly
            return (
              <code className={className} {...props}>
                {children}
              </code>
            );
          }
          // Multi-line fenced code block → highlight + copy button
          return (
            <div className="position-relative md-pre-wrapper">
              <SyntaxHighlighter
                style={oneDark}
                language={match ? match[1] : null}
                PreTag="div"
                customStyle={{ margin: 0, borderRadius: '0.25rem', minHeight: '5rem' }}
              >
                {codeText || ' '} {/* keep at least a space to hold height */}
              </SyntaxHighlighter>
              <button
                type="button"
                className="btn btn-sm btn-outline-secondary position-absolute top-0 end-0 m-1 d-flex align-items-center gap-1"
                onClick={() => handleCopy(codeText)}
              >
                <span role="img" aria-label="Copy">
                  📋
                </span>
                Copy
              </button>
            </div>
          );
        },
      }}
    >
      {content}
    </ReactMarkdown>
  );
}
+27
View File
@@ -0,0 +1,27 @@
// UserMessage.jsx
import React from "react";
/**
 * Right-aligned user chat bubble. Content is shown verbatim inside a <pre>
 * so user-typed whitespace and newlines survive without Markdown rendering.
 */
export default function UserMessage({ content, theme }) {
  return (
    <div className="mb-2 text-end">
      <div
        className={`d-inline-block p-2 rounded ${theme.userBg}`}
        style={{ maxWidth: "95%" }}
      >
        <pre
          className="m-0"
          style={{
            whiteSpace: "pre-wrap",
            fontFamily: "inherit",
            backgroundColor: "transparent", // kill default <pre> background
            color: "inherit", // match bubble text color
          }}
        >
          {content}
        </pre>
      </div>
    </div>
  );
}
+25
View File
@@ -0,0 +1,25 @@
// src/hooks/useStreamBuffer.js
import { useState, useRef } from 'react';
/**
 * Throttle rapid streaming text chunks into a single state value so the UI
 * re-renders at most once per `updateInterval` milliseconds.
 */
export function useStreamBuffer(updateInterval = 80) {
  const [buffered, setBuffered] = useState('');
  const textRef = useRef('');       // full text accumulated so far
  const lastFlushRef = useRef(0);   // timestamp of the last state flush

  // Append a chunk; flush to state only when the interval has elapsed.
  const pushChunk = (chunk) => {
    textRef.current += chunk;
    const now = performance.now();
    if (now - lastFlushRef.current > updateInterval) {
      setBuffered(textRef.current);
      lastFlushRef.current = now;
    }
  };

  // Clear everything before a new streaming response starts.
  const reset = () => {
    textRef.current = '';
    setBuffered('');
    lastFlushRef.current = 0;
  };

  return { buffered, pushChunk, reset };
}
+17
View File
@@ -0,0 +1,17 @@
// themes.js
// Bootstrap utility-class bundles for each UI theme. Components receive one
// of these objects as a `theme` prop and interpolate the class strings.
export const themes = {
  light: {
    userBg: "bg-primary text-white",          // user message bubble
    assistantBg: "bg-white border text-dark", // assistant message bubble
    bodyBg: "bg-light",                       // page / chat background
    headerBg: "bg-primary text-white",        // sticky header bar
  },
  dark: {
    userBg: "bg-dark text-white",
    assistantBg: "bg-secondary text-white",
    bodyBg: "bg-black text-white",
    headerBg: "bg-dark text-white",
  }
};
+118
View File
@@ -0,0 +1,118 @@
// useChatStream.js
import { useState, useCallback, useRef } from "react";
import { getSessionId, getUserId } from './useSessionId';
/**
 * React hook that owns the chat transcript and streams assistant replies
 * from the `/v1/chat-stream` SSE endpoint.
 *
 * Returns { messages, loading, sendMessage, stopGenerating, setMessages }.
 */
export function useChatStream() {
  const [messages, setMessages] = useState([]);
  const [loading, setLoading] = useState(false);
  const abortRef = useRef(null); // AbortController of the in-flight stream
  const sessionId = getSessionId();
  const userId = getUserId();

  const sendMessage = useCallback(
    async (input) => {
      if (!input.trim()) return;

      // Abort any previous stream before starting a new one.
      if (abortRef.current) {
        abortRef.current.abort();
      }
      const controller = new AbortController();
      abortRef.current = controller;

      const userMessage = { role: "user", content: input };

      // FIX: compute the assistant placeholder's index inside the functional
      // updater. The old code read `messages.length` from the closure, which
      // can be stale, and forced this callback to depend on `messages` —
      // recreating it on every transcript change.
      let assistantIndex = -1;
      setMessages((prev) => {
        assistantIndex = prev.length + 1;
        return [...prev, userMessage, { role: "assistant", content: "" }];
      });
      setLoading(true);

      try {
        const res = await fetch("/v1/chat-stream", {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
            Accept: "text/event-stream",
          },
          body: JSON.stringify({ user_id: userId, session_id: sessionId, message: input }),
          signal: controller.signal,
        });
        if (!res.ok || !res.body) throw new Error(`HTTP ${res.status}`);

        const reader = res.body.getReader();
        const decoder = new TextDecoder("utf-8");
        let buffer = ""; // raw bytes not yet split into SSE events
        let acc = "";    // accumulated assistant text

        while (true) {
          const { value, done } = await reader.read();
          if (done) break;
          buffer += decoder.decode(value, { stream: true });

          // SSE events are separated by a blank line.
          const parts = buffer.split("\n\n");
          buffer = parts.pop() || "";

          for (const part of parts) {
            const line = part
              .split("\n")
              .map((l) => l.trim())
              .find((l) => l.startsWith("data:"));
            if (!line) continue;
            const data = line.slice(5).trim();

            if (data === "[DONE]") {
              // Commit the final text and finish.
              setMessages((prev) => {
                const next = [...prev];
                next[assistantIndex] = { role: "assistant", content: acc };
                return next;
              });
              setLoading(false);
              return;
            }

            try {
              const obj = JSON.parse(data);
              const piece = obj?.choices?.[0]?.delta?.content ?? obj?.choices?.[0]?.text ?? "";
              if (piece) {
                acc += piece;
                setMessages((prev) => {
                  const next = [...prev];
                  next[assistantIndex] = { role: "assistant", content: acc };
                  return next;
                });
              }
            } catch {
              /* ignore malformed JSON chunks */
            }
          }
        }
      } catch (err) {
        // AbortError is expected on Stop / resend; anything else is surfaced
        // in place of the assistant reply.
        if (err.name !== "AbortError") {
          setMessages((prev) => {
            const next = [...prev];
            next[assistantIndex] = {
              role: "assistant",
              content: `Error: ${String(err)}`,
            };
            return next;
          });
        }
      } finally {
        setLoading(false);
        abortRef.current = null;
      }
    },
    [userId, sessionId]
  );

  // Cancel the in-flight stream, if any.
  const stopGenerating = useCallback(() => {
    if (abortRef.current) {
      abortRef.current.abort();
      abortRef.current = null;
      setLoading(false);
    }
  }, []);

  return { messages, loading, sendMessage, stopGenerating, setMessages };
}
+26
View File
@@ -0,0 +1,26 @@
// useIds.js
// Return a stable per-browser user id, creating and persisting one on first use.
export function getUserId() {
  const key = "userId";
  let id = localStorage.getItem(key);
  if (!id) {
    id = crypto.randomUUID();
    localStorage.setItem(key, id);
  }
  return id;
}
// Return the current session id, creating and persisting one on first use.
export function getSessionId() {
  const key = "sessionId";
  let id = localStorage.getItem(key);
  if (!id) {
    id = crypto.randomUUID();
    localStorage.setItem(key, id);
  }
  return id;
}
// Start a brand-new session: overwrite the stored id and return the new one.
export function resetSessionId() {
  const fresh = crypto.randomUUID();
  localStorage.setItem("sessionId", fresh);
  return fresh;
}
+162 -46
View File
@@ -1,66 +1,182 @@
####
#### Streamlit Streaming using LM Studio as OpenAI Standin
#### run with `streamlit run app.py`
# !pip install pypdf langchain langchain_openai
import streamlit as st
import streamlit.components.v1 as components
import redis
import json
import uuid
import hashlib
from langchain_core.messages import AIMessage, HumanMessage
from langchain_openai import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
# app config
st.set_page_config(page_title="Egalware Chatbot", page_icon="🤖")
st.title("Egalware's Streaming Chatbot")
# ---------------------
# Redis connection
# ---------------------
r = redis.Redis(host="localhost", port=6379, decode_responses=True)
def get_response(user_query, chat_history):
# ---------------------
# Session ID helpers
# ---------------------
def get_or_set_session_id():
"""Get or set session_id from browser localStorage and trigger rerun when first set."""
if "session_id" in st.session_state:
return st.session_state.session_id
template = """
You are a helpful assistant. Answer the following questions considering the history of the conversation:
components.html(f"""
<script>
const key = 'egalware_session_id';
let sid = window.localStorage.getItem(key);
if (!sid) {{
sid = '{uuid.uuid4()}';
window.localStorage.setItem(key, sid);
}}
const streamlitDoc = window.parent.document;
let hidden = streamlitDoc.querySelector('#session_id_input');
if (!hidden) {{
hidden = document.createElement('input');
hidden.type = 'hidden';
hidden.id = 'session_id_input';
streamlitDoc.body.appendChild(hidden);
}}
hidden.value = sid;
hidden.dispatchEvent(new Event('input', {{ bubbles: true }}));
</script>
""", height=0)
Chat history: {chat_history}
sid = st.text_input("session_id_input", label_visibility="collapsed")
if sid and sid != st.session_state.get("session_id"):
st.session_state.session_id = sid
st.rerun() # immediately rerun with the new ID
return st.session_state.get("session_id")
User question: {user_question}
"""
# ---------------------
# Chat history persistence
# ---------------------
def load_history(session_id):
raw = r.get(f"chatbot:history:{session_id}")
if raw:
messages = json.loads(raw)
return [
AIMessage(content=m["content"]) if m["type"] == "ai"
else HumanMessage(content=m["content"])
for m in messages
]
return [AIMessage(content="Hello, I am EgalWare's ChatBot. How can I help you? (puoi fare domande in italiano, ma in inglese funziona meglio...)")]
prompt = ChatPromptTemplate.from_template(template)
# Using LM Studio Local Inference Server
llm = ChatOpenAI(base_url="http://10.74.83.100:1234/v1",api_key="lm-studio", model="qwen/qwen3-4b-2507")
chain = prompt | llm | StrOutputParser()
return chain.stream({
"chat_history": chat_history,
"user_question": user_query,
})
# session state
if "chat_history" not in st.session_state:
st.session_state.chat_history = [
AIMessage(content="Hello, I am a bot. How can I help you?"),
def save_history(session_id, history):
messages = [
{"type": "ai" if isinstance(m, AIMessage) else "human", "content": m.content}
for m in history
]
r.set(f"chatbot:history:{session_id}", json.dumps(messages))
r.expire(f"chatbot:history:{session_id}", 60*60*24*7)
# conversation
def delete_history(session_id):
r.delete(f"chatbot:history:{session_id}")
for key in r.scan_iter(f"chatbot:cache:{session_id}:*"):
r.delete(key)
# ---------------------
# Caching
# ---------------------
def get_cache_key(session_id, prompt):
digest = hashlib.sha256(prompt.encode()).hexdigest()
return f"chatbot:cache:{session_id}:{digest}"
def get_cached_response(session_id, prompt):
return r.get(get_cache_key(session_id, prompt))
def set_cached_response(session_id, prompt, response):
r.setex(get_cache_key(session_id, prompt), 300, response)
# ---------------------
# LLM
# ---------------------
def get_response(session_id, user_query, chat_history):
cached = get_cached_response(session_id, user_query)
if cached:
yield cached
return
prompt = ChatPromptTemplate.from_template(
"You are a helpful assistant. Answer the following considering the history:\n\n"
"Chat history: {chat_history}\n\nUser question: {user_question}"
)
llm = ChatOpenAI(
base_url="http://10.74.83.100:1234/v1",
api_key="lm-studio",
model="qwen/qwen3-4b-2507"
)
chain = prompt | llm | StrOutputParser()
full_resp = ""
for chunk in chain.stream({
"chat_history": chat_history,
"user_question": user_query
}):
full_resp += chunk
yield chunk
set_cached_response(session_id, user_query, full_resp)
# ---------------------
# UI
# ---------------------
st.set_page_config(page_title="Egalware Chatbot", page_icon="🤖")
session_id = get_or_set_session_id()
# Sticky header CSS
st.markdown("""
<style>
.sticky-header {
position: sticky;
top: 0;
background-color: white;
padding-top: 0.5rem;
padding-bottom: 0.5rem;
z-index: 999;
border-bottom: 1px solid #ddd;
}
</style>
""", unsafe_allow_html=True)
# Header
st.markdown('<div class="sticky-header">', unsafe_allow_html=True)
col_title, col_btn = st.columns([0.9, 0.1])
with col_title:
st.title("Egalware's Chatbot")
with col_btn:
if st.button("🗑️", help="Clear conversation", type="secondary"):
delete_history(session_id)
st.session_state.chat_history = load_history(session_id)
st.rerun()
st.markdown('</div>', unsafe_allow_html=True)
# If still no ID, display placeholder history so UI doesn't look empty
if not session_id:
if "chat_history" not in st.session_state:
st.session_state.chat_history = [AIMessage(content="Initializing session… please wait")]
else:
if "chat_history" not in st.session_state:
st.session_state.chat_history = load_history(session_id)
# Conversation display
for message in st.session_state.chat_history:
if isinstance(message, AIMessage):
with st.chat_message("AI"):
st.write(message.content)
elif isinstance(message, HumanMessage):
with st.chat_message("Human"):
st.write(message.content)
role = "AI" if isinstance(message, AIMessage) else "Human"
with st.chat_message(role):
st.write(message.content)
# user input
user_query = st.chat_input("Type your message here...")
if user_query is not None and user_query != "":
# Input
user_query = st.chat_input("Type your message here")
if session_id and user_query:
st.session_state.chat_history.append(HumanMessage(content=user_query))
with st.chat_message("Human"):
st.markdown(user_query)
with st.chat_message("AI"):
response = st.write_stream(get_response(user_query, st.session_state.chat_history))
response_text = st.write_stream(
get_response(session_id, user_query, st.session_state.chat_history)
)
st.session_state.chat_history.append(AIMessage(content=response_text))
save_history(session_id, st.session_state.chat_history)
st.session_state.chat_history.append(AIMessage(content=response))
+66
View File
@@ -0,0 +1,66 @@
####
#### Streamlit Streaming using LM Studio as OpenAI Standin
#### run with `streamlit run app.py`
# !pip install pypdf langchain langchain_openai
import streamlit as st
from langchain_core.messages import AIMessage, HumanMessage
from langchain_openai import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
# App config — set_page_config must be the first Streamlit call in the script.
st.set_page_config(page_title="Egalware Chatbot", page_icon="🤖")
st.title("Egalware's Chatbot")
def get_response(user_query, chat_history):
    """Stream an answer for *user_query* given the running *chat_history*.

    Builds a prompt | LLM | parser chain against the local LM Studio
    OpenAI-compatible server and returns a generator of text chunks
    suitable for ``st.write_stream``.
    """
    template = """
You are a helpful assistant. Answer the following questions considering the history of the conversation:
Chat history: {chat_history}
User question: {user_question}
"""
    # Using LM Studio Local Inference Server (OpenAI-compatible endpoint)
    llm = ChatOpenAI(
        base_url="http://10.74.83.100:1234/v1",
        api_key="lm-studio",
        model="qwen/qwen3-4b-2507",
    )
    chain = ChatPromptTemplate.from_template(template) | llm | StrOutputParser()
    return chain.stream({
        "chat_history": chat_history,
        "user_question": user_query,
    })
# --- Session state: seed the conversation with a greeting on first run ---
if "chat_history" not in st.session_state:
    st.session_state.chat_history = [
        AIMessage(content="Hello, I am EgalWare's current ChatBot. How can I help you? (puoi fare domande in italiano, ma in inglese funziona meglio...)"),
    ]

# --- Render the conversation so far ---
for msg in st.session_state.chat_history:
    if isinstance(msg, AIMessage):
        with st.chat_message("AI"):
            st.write(msg.content)
    elif isinstance(msg, HumanMessage):
        with st.chat_message("Human"):
            st.write(msg.content)

# --- Handle new user input ---
user_query = st.chat_input("Type your message here...")
if user_query:  # chat_input yields None or a (possibly empty) string
    st.session_state.chat_history.append(HumanMessage(content=user_query))
    with st.chat_message("Human"):
        st.markdown(user_query)
    with st.chat_message("AI"):
        # write_stream renders chunks as they arrive and returns the full text
        response = st.write_stream(get_response(user_query, st.session_state.chat_history))
    st.session_state.chat_history.append(AIMessage(content=response))