# textai-v2/ui/chat.py
"""
Chat UI Component
Clean ChatGPT/Grok-style interface with a webpage-style layout
"""
import gradio as gr
from typing import List, Tuple
from core.models import get_model_service
from core.sessions import get_session_service
from core.state import get_state
from core.logger import logger
DEFAULT_SYSTEM_PROMPT = """You are a helpful AI assistant. Be concise, accurate, and helpful."""
def build_chat_ui():
"""Build the chat interface with proper webpage-style layout"""
# ══════════════════════════════════════════════════════════════════
# HELPER FUNCTIONS
# ══════════════════════════════════════════════════════════════════
def get_sessions_for_sidebar() -> List[List]:
"""Get sessions formatted for sidebar display"""
service = get_session_service()
sessions = service.get_all_sessions()
        return [
            [s["id"], (s["title"][:30] + "...") if len(s["title"]) > 30 else s["title"]]
            for s in sessions[:20]
        ]
def get_model_choices() -> List[str]:
"""Get models for dropdown"""
service = get_model_service()
models = service.get_installed_models()
loaded_id = get_state().get_loaded_model_id()
choices = []
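        # "● " marks the currently loaded model; on_load_model strips this prefix
        # before matching the dropdown selection back to a model name.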
for m in models:
prefix = "● " if m["id"] == loaded_id else ""
choices.append(f"{prefix}{m['name']}")
return choices if choices else ["No models installed"]
def get_current_model_display() -> str:
"""Get current model for header display"""
service = get_model_service()
model = service.get_loaded_model()
return f"● {model['name']}" if model else "No model loaded"
def format_history(session_id: str) -> List[Tuple[str, str]]:
"""Format session messages for Gradio chatbot"""
service = get_session_service()
display = service.get_session_for_display(session_id)
if not display:
return []
return display.get("history", [])
# ══════════════════════════════════════════════════════════════════
# EVENT HANDLERS
# ══════════════════════════════════════════════════════════════════
def on_new_chat(system_prompt: str = ""):
"""Create new chat session"""
service = get_session_service()
session = service.create_session(
system_prompt=system_prompt or DEFAULT_SYSTEM_PROMPT
)
logger.info("Chat", f"New session: {session['id']}")
return session["id"], [], get_sessions_for_sidebar(), ""
def on_session_select(evt: gr.SelectData, sessions_data):
"""Load selected session"""
try:
if evt.index is not None:
row_idx = evt.index[0] if isinstance(evt.index, (list, tuple)) else evt.index
# Handle both DataFrame and list
if hasattr(sessions_data, 'values'):
data = sessions_data.values.tolist()
else:
data = sessions_data if sessions_data else []
if data and row_idx < len(data):
session_id = data[row_idx][0]
service = get_session_service()
service.set_active_session(session_id)
history = format_history(session_id)
logger.info("Chat", f"Loaded session: {session_id}")
return session_id, history
except Exception as e:
logger.error("Chat", f"Session select error: {e}")
return "", []
def on_send_message(
session_id: str,
message: str,
history: List,
max_tokens: int,
temperature: float,
system_prompt: str
):
"""Send message and get response"""
if not message.strip():
return history, "", session_id, get_sessions_for_sidebar()
model_service = get_model_service()
session_service = get_session_service()
# Check model
if not model_service.is_model_loaded():
history = history + [(message, "⚠️ Please load a model first. Go to Models tab.")]
return history, "", session_id, get_sessions_for_sidebar()
# Create session if needed
if not session_id:
session = session_service.create_session(system_prompt=system_prompt)
session_id = session["id"]
# Add user message to session
session_service.add_message(session_id, "user", message)
# Build messages for model
session = session_service.get_session(session_id)
messages = []
if session.get("system_prompt"):
messages.append({"role": "system", "content": session["system_prompt"]})
for msg in session.get("messages", []):
messages.append({"role": msg["role"], "content": msg["content"]})
# Generate response
try:
response = model_service.generate(messages, max_tokens, temperature)
session_service.add_message(session_id, "assistant", response)
history = history + [(message, response)]
except Exception as e:
logger.error("Chat", f"Generation error: {e}")
history = history + [(message, f"⚠️ Error: {e}")]
return history, "", session_id, get_sessions_for_sidebar()
def on_load_model(choice: str):
"""Load selected model"""
if not choice or choice == "No models installed":
return get_current_model_display(), "Select a model"
name = choice.replace("● ", "")
model_service = get_model_service()
models = model_service.get_installed_models()
for m in models:
if m["name"] == name:
result = model_service.load_model(m["id"])
if result["success"]:
return get_current_model_display(), f"βœ“ Loaded"
else:
return get_current_model_display(), f"βœ— {result.get('error')}"
return get_current_model_display(), "Not found"
def on_delete_session(session_id: str):
"""Delete current session"""
if session_id:
get_session_service().delete_session(session_id)
return "", [], get_sessions_for_sidebar()
def toggle_sidebar(visible: bool):
"""Toggle sidebar visibility"""
return gr.update(visible=not visible), not visible
# ══════════════════════════════════════════════════════════════════
# BUILD UI - Webpage Style Layout
# ══════════════════════════════════════════════════════════════════
# State
current_session_id = gr.State("")
sidebar_visible = gr.State(True)
# Main container
with gr.Row(elem_id="main-container"):
# ─────────────────────────────────────────────────────────────
# SIDEBAR
# ─────────────────────────────────────────────────────────────
with gr.Column(scale=1, min_width=240, elem_id="sidebar", visible=True) as sidebar:
btn_new_chat = gr.Button("+ New Chat", variant="secondary", size="lg")
gr.Markdown("**Recent Chats**")
sessions_list = gr.Dataframe(
headers=["id", "Title"],
value=get_sessions_for_sidebar(),
interactive=False,
show_label=False,
row_count=10,
column_count=(2, "fixed"),
elem_id="sessions-table"
)
gr.Markdown("---")
gr.Markdown("**Model**")
model_dropdown = gr.Dropdown(
choices=get_model_choices(),
value=None,
label="",
show_label=False
)
btn_load_model = gr.Button("Load Model", size="sm")
model_status = gr.Textbox(value="", show_label=False, interactive=False, max_lines=1)
# ─────────────────────────────────────────────────────────────
# CHAT AREA
# ─────────────────────────────────────────────────────────────
with gr.Column(scale=4, elem_id="chat-area"):
# Header bar
with gr.Row(elem_id="chat-header"):
btn_toggle = gr.Button("☰", size="sm", elem_id="toggle-btn")
current_model = gr.Textbox(
value=get_current_model_display(),
show_label=False,
interactive=False,
elem_id="model-display"
)
                    btn_delete = gr.Button("🗑️", size="sm")
# Chat messages - FIXED HEIGHT with scroll
chatbot = gr.Chatbot(
value=[],
label="",
show_label=False,
height=400,
elem_id="chatbot",
autoscroll=False
)
# Input area at bottom
with gr.Row(elem_id="input-row"):
chat_input = gr.Textbox(
placeholder="Type your message...",
show_label=False,
lines=1,
max_lines=3,
scale=9,
elem_id="chat-input"
)
btn_send = gr.Button("Send", variant="primary", scale=1)
# Settings (collapsible)
with gr.Accordion("Settings", open=False):
max_tokens = gr.Slider(64, 2048, value=512, step=64, label="Max Tokens")
temperature = gr.Slider(0.1, 2.0, value=0.7, step=0.1, label="Temperature")
system_prompt = gr.TextArea(value=DEFAULT_SYSTEM_PROMPT, label="System Prompt", lines=2)
# ══════════════════════════════════════════════════════════════════
# WIRE UP EVENTS
# ══════════════════════════════════════════════════════════════════
# Toggle sidebar
btn_toggle.click(
toggle_sidebar,
inputs=[sidebar_visible],
outputs=[sidebar, sidebar_visible]
)
# New Chat
btn_new_chat.click(
on_new_chat,
inputs=[system_prompt],
outputs=[current_session_id, chatbot, sessions_list, chat_input]
)
# Session select
sessions_list.select(
on_session_select,
inputs=[sessions_list],
outputs=[current_session_id, chatbot]
)
# Send message
btn_send.click(
on_send_message,
inputs=[current_session_id, chat_input, chatbot, max_tokens, temperature, system_prompt],
outputs=[chatbot, chat_input, current_session_id, sessions_list]
)
chat_input.submit(
on_send_message,
inputs=[current_session_id, chat_input, chatbot, max_tokens, temperature, system_prompt],
outputs=[chatbot, chat_input, current_session_id, sessions_list]
)
# Load model
btn_load_model.click(
on_load_model,
inputs=[model_dropdown],
outputs=[current_model, model_status]
)
# Delete session
btn_delete.click(
on_delete_session,
inputs=[current_session_id],
outputs=[current_session_id, chatbot, sessions_list]
)
return {
"chatbot": chatbot,
"input": chat_input,
"sessions": sessions_list,
"model_dropdown": model_dropdown,
"current_model": current_model,
"session_id": current_session_id
}
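
# Minimal standalone usage sketch (assumption): in the full app, build_chat_ui() is
# presumably mounted from a separate entry point such as an app.py that composes the
# tabs; the Blocks wrapper and launch() call below are illustrative only, not the
# project's actual wiring.
if __name__ == "__main__":
    with gr.Blocks(title="TextAI v2 Chat") as demo:
        build_chat_ui()
    demo.launch()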