""" TextAI v2 - Simple Chat Interface Based on text_space pattern - Chat tab + Tools tab """ import gradio as gr import json # Core imports from modules.config import VERSION from modules.theme import DARK_CSS, get_theme from modules.logger import logger from modules.system import get_system_info, get_system_dashboard, get_space_info, get_storage_stats from modules.utils import format_size # File & Tools imports from modules.file_manager import ( get_folder_contents, get_breadcrumb, get_status, get_quick_access_folders, go_up, go_home, refresh_all, search_files, upload_files, download_file, create_file, read_file, delete_item ) from modules.ui_helpers import ( ui_create_folder, ui_create_file, ui_delete, on_folder_tree_select, navigate_from_main, get_events_table, get_logs_display, ui_refresh_logs, ui_clear_logs ) from modules.hf_hub import ( search_hf_models_enhanced, download_model_file, list_downloaded_models, remove_model, get_model_files, TEXT_MODELS_DIR, get_installed_model_ids ) from modules.api import ping, get_api_list, api_get_logs # TextAI imports try: from modules.text_ai import ( model_manager, session_manager, DEFAULT_CHAT_PROMPT, DEFAULT_ROLEPLAY_PROMPT, api_list_models, api_load_model, api_unload_model, api_model_status, api_chat, api_create_session, api_list_sessions, api_get_session, api_delete_session, api_inference ) from modules.text_ui import ( get_chat_list, get_model_choices, get_current_model_display, format_chat_history_tuples, get_models_table, ui_new_chat, ui_load_session, ui_send_message, ui_rename_session, ui_delete_session, ui_clear_chat, ui_load_model_by_index, ui_unload_model, get_suggested_models_table, SUGGESTED_MODELS, DEFAULT_MODEL, download_default_model ) TEXTAI_AVAILABLE = True except Exception as e: print(f"TextAI import error: {e}") TEXTAI_AVAILABLE = False def get_chat_list(): return [] def get_current_model_display(): return "Not available" def get_models_table(): return [] def get_suggested_models_table(): return [] def 
download_default_model(): return "TextAI not available" model_manager = None DEFAULT_MODEL = {"name": "TinyLlama 1.1B", "size": "0.7GB"} DEFAULT_CHAT_PROMPT = "" # ══════════════════════════════════════════════════════════════════════════════ # HELPER FUNCTIONS # ══════════════════════════════════════════════════════════════════════════════ def get_initial_installed_models(): """Get installed models for initial table display""" rows = [] try: if not TEXT_MODELS_DIR.exists(): return rows for d in TEXT_MODELS_DIR.iterdir(): if d.is_dir(): gguf_files = list(d.glob("*.gguf")) if gguf_files: for f in gguf_files: size = format_size(f.stat().st_size) rows.append([f.stem, "GGUF", size, "Ready"]) elif (d / "config.json").exists(): total_size = sum(f.stat().st_size for f in d.rglob("*") if f.is_file()) rows.append([d.name, "Transformers", format_size(total_size), "Ready"]) for f in TEXT_MODELS_DIR.glob("*.gguf"): size = format_size(f.stat().st_size) rows.append([f.stem, "GGUF", size, "Ready"]) except Exception as e: print(f"Initial installed models error: {e}") return rows # ══════════════════════════════════════════════════════════════════════════════ # CUSTOM CSS # ══════════════════════════════════════════════════════════════════════════════ CHAT_CSS = """ /* Clean ChatGPT/Grok Style */ .chat-container { max-width: 900px; margin: 0 auto; } .sidebar { border-right: 1px solid #333; } .chat-list { max-height: 70vh; overflow-y: auto; } .chat-item { padding: 10px; cursor: pointer; border-radius: 8px; margin: 4px 0; } .chat-item:hover { background: #2a2a2a; } .menu-btn { background: transparent !important; border: none !important; } .full-width { width: 100% !important; } .no-label label { display: none !important; } .model-badge { background: #1a1a2e; padding: 4px 12px; border-radius: 20px; font-size: 12px; color: #888; } .settings-panel { background: #1a1a1a; border-radius: 12px; padding: 16px; } #chatbot { min-height: 500px; } .message { white-space: pre-wrap; } """ # 
# ══════════════════════════════════════════════════════════════════════════════
# BUILD APP
# ══════════════════════════════════════════════════════════════════════════════
with gr.Blocks(title="TextAI v2") as demo:
    # Hidden state: the active session id and whether roleplay mode is on.
    current_session_id = gr.State("")
    is_roleplay_mode = gr.State(False)

    with gr.Tabs() as main_tabs:
        # ══════════════════════════════════════════════════════════════════════
        # TAB 1: CHAT
        # ══════════════════════════════════════════════════════════════════════
        with gr.Tab("Chat", id=0):
            with gr.Row():
                # LEFT SIDEBAR - Chat List
                with gr.Column(scale=1, min_width=250):
                    with gr.Row():
                        btn_new_chat = gr.Button("+ New Chat", variant="primary", size="sm")
                    # Sessions table: column 0 holds the session id, column 1 the title.
                    chat_list = gr.Dataframe(
                        headers=["id", "Chat"],
                        value=get_chat_list() if TEXTAI_AVAILABLE else [],
                        interactive=False,
                        row_count=15,
                        column_count=(2, "fixed"),
                        show_label=False
                    )

                # MAIN CHAT AREA
                with gr.Column(scale=4):
                    with gr.Row():
                        model_display = gr.Textbox(
                            value=get_current_model_display() if TEXTAI_AVAILABLE else "No model",
                            interactive=False,
                            show_label=False,
                            scale=3,
                            elem_classes="model-badge"
                        )
                        with gr.Column(scale=1):
                            menu_dropdown = gr.Dropdown(
                                choices=["Switch Model", "Rename Chat", "Delete Chat", "Clear Chat"],
                                label="",
                                show_label=False,
                                scale=1
                            )
                    chat_title = gr.Textbox(
                        value="New Chat",
                        show_label=False,
                        interactive=False,
                        elem_classes="no-label"
                    )
                    chatbot = gr.Chatbot(
                        label="",
                        height=500,
                        show_label=False,
                        elem_id="chatbot"
                    )
                    with gr.Row():
                        chat_input = gr.Textbox(
                            placeholder="Send a message...",
                            show_label=False,
                            scale=6,
                            lines=1,
                            max_lines=5
                        )
                        btn_send = gr.Button("Send", variant="primary", scale=1)

                # RIGHT SIDEBAR - Quick Settings
                with gr.Column(scale=1, min_width=180, visible=True):
                    gr.Markdown("### Settings")
                    max_tokens = gr.Slider(
                        minimum=64, maximum=2048, value=512, step=64,
                        label="Max Tokens"
                    )
                    temperature = gr.Slider(
                        minimum=0.1, maximum=2.0, value=0.7, step=0.1,
                        label="Temperature"
                    )
                    gr.Markdown("---")
                    roleplay_toggle = gr.Checkbox(label="Roleplay Mode", value=False)

            # Hidden panels (revealed by the menu dropdown).
            with gr.Row(visible=False) as rename_panel:
                rename_input = gr.Textbox(label="New Title", scale=3)
                btn_rename = gr.Button("Rename", scale=1)
                btn_cancel_rename = gr.Button("Cancel", scale=1)
            with gr.Row(visible=False) as model_panel:
                with gr.Column():
                    gr.Markdown("### Select Model")
                    with gr.Row():
                        model_choices = gr.Dropdown(
                            choices=get_model_choices() if TEXTAI_AVAILABLE else [],
                            label="Available Models",
                            scale=3
                        )
                        btn_refresh_model_list = gr.Button("🔄", size="sm", scale=1)
                    with gr.Row():
                        btn_load_model = gr.Button("Load Model", variant="primary")
                        btn_close_model = gr.Button("Close")

        # ══════════════════════════════════════════════════════════════════════
        # TAB 2: TOOLS (Models, Files, HF Hub, System, Logs, API)
        # ══════════════════════════════════════════════════════════════════════
        with gr.Tab("Tools", id=1):
            with gr.Tabs():
                # MODEL MANAGER
                with gr.Tab("Models"):
                    gr.Markdown("### Installed Models")
                    gr.Markdown("Click a model to load it. Download new models from **HF Hub** tab.")
                    with gr.Row():
                        current_model_text = gr.Textbox(
                            value=get_current_model_display() if TEXTAI_AVAILABLE else "No model loaded",
                            label="Currently Loaded",
                            interactive=False,
                            scale=3
                        )
                        btn_refresh_models = gr.Button("🔄 Refresh", size="sm")
                        btn_unload_model = gr.Button("Unload", size="sm", variant="stop")
                    models_table = gr.Dataframe(
                        headers=["●", "Name", "Type", "Size", "Custom Prompt"],
                        value=get_models_table() if TEXTAI_AVAILABLE else [],
                        interactive=False,
                        row_count=6
                    )
                    model_status = gr.Textbox(label="", interactive=False, show_label=False)
                    with gr.Accordion("Quick Install", open=False):
                        gr.Markdown(f"**Recommended for testing:** {DEFAULT_MODEL['name']} ({DEFAULT_MODEL['size']})")
                        with gr.Row():
                            btn_install_default = gr.Button("Install TinyLlama", variant="primary", size="sm")
                            install_status = gr.Textbox(label="", interactive=False, show_label=False, scale=3)
                        gr.Markdown("**Other suggestions:**")
                        suggested_models = gr.Dataframe(
                            headers=["Name", "Size", "Model ID"],
                            value=get_suggested_models_table() if TEXTAI_AVAILABLE else [],
                            interactive=False,
                            row_count=4
                        )

                # HF HUB - Model Search & Download
                with gr.Tab("HF Hub"):
                    gr.Markdown("### Model Manager")
                    gr.Markdown("#### Installed Models")
                    installed_models_table = gr.Dataframe(
                        headers=["Name", "Type", "Size", "Status"],
                        value=get_initial_installed_models(),
                        interactive=False,
                        row_count=4
                    )
                    with gr.Row():
                        selected_installed_model = gr.Textbox(label="Selected", interactive=False, scale=2)
                        btn_refresh_installed = gr.Button("🔄", size="sm")
                        btn_delete_installed = gr.Button("🗑️ Delete", size="sm", variant="stop")
                    gr.Markdown("---")
                    gr.Markdown("#### Search & Add Models")
                    with gr.Row():
                        hf_query = gr.Textbox(
                            label="",
                            placeholder="Search models... (tinyllama, mistral, phi, llama)",
                            scale=4,
                            show_label=False
                        )
                        hf_max_params = gr.Dropdown(
                            label="Size",
                            choices=[("< 3B", "3"), ("< 7B", "7"), ("Any", "0")],
                            value="7",
                            scale=1
                        )
                        btn_hf_search = gr.Button("Search", variant="primary", scale=1)
                    hf_status = gr.Textbox(label="", interactive=False, show_label=False)
                    hf_results = gr.Dataframe(
                        headers=["Model ID", "Params", "Est.Size", "Status", "Downloads"],
                        interactive=False,
                        row_count=8
                    )
                    selected_model_id = gr.Textbox(label="Selected", interactive=False)
                    with gr.Row():
                        btn_view_files = gr.Button("View Files", scale=1)
                        btn_auto_add = gr.Button("Auto Add (Best Q4)", variant="primary", scale=1)
                    # Quantization picker, revealed by "View Files".
                    with gr.Column(visible=False) as file_panel:
                        gr.Markdown("**Select Quantization:**")
                        file_list_hub = gr.Dataframe(
                            headers=["Filename", "Quant", "Recommended"],
                            interactive=False,
                            row_count=6
                        )
                        selected_file = gr.Textbox(label="Selected File", interactive=False)
                        with gr.Row():
                            btn_download_file = gr.Button("Download This File", variant="primary")
                            btn_close_files = gr.Button("Close")
                    action_status = gr.Textbox(label="Status", interactive=False)

                # FILE MANAGER
                with gr.Tab("Files"):
                    with gr.Row():
                        btn_up = gr.Button("Up", size="sm")
                        btn_home_fm = gr.Button("Home", size="sm")
                        btn_refresh_fm = gr.Button("Refresh", size="sm")
                        btn_new_folder = gr.Button("+ Folder", size="sm")
                        btn_new_file = gr.Button("+ File", size="sm")
                    breadcrumb = gr.Textbox(value=get_breadcrumb(), label="", interactive=False)
                    with gr.Row():
                        with gr.Column(scale=1):
                            folder_tree = gr.Dataframe(
                                headers=["", "Name", "Path"],
                                value=get_quick_access_folders(),
                                interactive=False,
                                row_count=10,
                                show_label=False
                            )
                        with gr.Column(scale=3):
                            search_input = gr.Textbox(placeholder="Search...", show_label=False)
                            file_list = gr.Dataframe(
                                headers=["", "Name", "Type", "Size", "Modified"],
                                value=get_folder_contents(),
                                interactive=False,
                                row_count=10,
                                show_label=False
                            )
                            upload_area = gr.File(label="Upload", file_count="multiple")
                        with gr.Column(scale=1):
                            selected_item = gr.Textbox(label="Selected", interactive=False)
                            preview_txt = gr.TextArea(label="Preview", lines=6, interactive=False)
                            with gr.Row():
                                btn_delete_fm = gr.Button("Delete", size="sm", variant="stop")
                                btn_download_fm = gr.Button("Download", size="sm")
                            with gr.Row():
                                new_name = gr.Textbox(label="Name", scale=2)
                                new_content = gr.TextArea(label="Content", lines=2, scale=3)
                    status_bar = gr.Textbox(value=get_status(), label="", interactive=False)

                # SYSTEM
                with gr.Tab("System"):
                    with gr.Row():
                        with gr.Column():
                            gr.Markdown("### System Info")
                            btn_sys_refresh = gr.Button("Refresh")
                            sys_info = gr.Code(value=get_system_dashboard(), language=None)
                        with gr.Column():
                            gr.Markdown("### Storage")
                            btn_storage_refresh = gr.Button("Refresh")
                            storage_info = gr.Code(value=get_storage_stats(), language="json")

                # LOGS
                with gr.Tab("Logs"):
                    with gr.Row():
                        log_type = gr.Dropdown(
                            choices=["all", "events", "errors"],
                            value="all",
                            label="Type"
                        )
                        log_limit = gr.Number(value=50, label="Limit")
                        btn_refresh_logs = gr.Button("Refresh", variant="primary")
                        btn_clear_logs = gr.Button("Clear", variant="stop")
                    logs_display = gr.TextArea(
                        value=get_logs_display("all", 50),
                        lines=20,
                        interactive=False
                    )

                # API
                with gr.Tab("API"):
                    gr.Markdown("### API Endpoints")
                    api_list = gr.Code(value=get_api_list(), language="json")
                    gr.Markdown("### Test")
                    btn_ping = gr.Button("Ping", variant="primary")
                    ping_result = gr.JSON()
                    gr.Markdown("### Quick Logs (Errors)")
                    btn_get_errors = gr.Button("Get Recent Errors")
                    error_logs = gr.Code(language="json")

        # ══════════════════════════════════════════════════════════════════════
        # TAB 3: ABOUT
        # ══════════════════════════════════════════════════════════════════════
        with gr.Tab("About", id=2):
            gr.Markdown("""
# TextAI v2

**AI-Powered Text Generation**

Features:
- Chat with local LLM models (GGUF)
- Model management with HF Hub integration
- Auto-download models
- File manager
- System monitoring

---

**Keyboard Shortcuts:**
- `Enter` - Send message
- `Shift+Enter` - New line

---

Built with Gradio | Models from HuggingFace
""")

    # ══════════════════════════════════════════════════════════════════════════
    # EVENT HANDLERS
    # ══════════════════════════════════════════════════════════════════════════
    # NOTE(review): chat/model-manager handlers below reference TextAI-only
    # names (ui_new_chat, session_manager, ...) and so are guarded by
    # TEXTAI_AVAILABLE; the HF Hub / file / system handlers further down use
    # their own `if TEXTAI_AVAILABLE else []` ternaries, indicating they sit
    # outside this guard — confirm against the original layout.
    if TEXTAI_AVAILABLE:
        # New Chat
        def handle_new_chat(is_rp):
            """Create a chat or roleplay session and refresh the sidebar."""
            mode = "roleplay" if is_rp else "chat"
            sid, hist, chats, title = ui_new_chat(mode)
            return sid, hist, chats, title, get_current_model_display()

        btn_new_chat.click(
            handle_new_chat,
            inputs=[roleplay_toggle],
            outputs=[current_session_id, chatbot, chat_list, chat_title, model_display]
        )

        # Load Session
        def handle_load_session(evt: gr.SelectData, sessions_data):
            """Open the session clicked in the sidebar table."""
            sid, hist, title, is_rp = ui_load_session(evt, sessions_data)
            return sid, hist, title, is_rp

        chat_list.select(
            handle_load_session,
            inputs=[chat_list],
            outputs=[current_session_id, chatbot, chat_title, roleplay_toggle]
        )

        # Send Message
        def handle_send(sid, msg, hist, tokens, temp, is_rp):
            """Stream assistant output; clears the input box ("") on each yield."""
            for result in ui_send_message(sid, msg, hist, tokens, temp, is_rp):
                hist_out, _, sid_out, chats = result
                yield sid_out, hist_out, "", chats, get_current_model_display()

        btn_send.click(
            handle_send,
            inputs=[current_session_id, chat_input, chatbot, max_tokens, temperature, roleplay_toggle],
            outputs=[current_session_id, chatbot, chat_input, chat_list, model_display]
        )
        # Enter in the textbox behaves like the Send button.
        chat_input.submit(
            handle_send,
            inputs=[current_session_id, chat_input, chatbot, max_tokens, temperature, roleplay_toggle],
            outputs=[current_session_id, chatbot, chat_input, chat_list, model_display]
        )

        # Menu Actions
        def handle_menu(choice, sid):
            """Dispatch the header dropdown; returns visibility for (rename_panel, model_panel)."""
            if choice == "Rename Chat":
                return gr.update(visible=True), gr.update(visible=False)
            elif choice == "Switch Model":
                return gr.update(visible=False), gr.update(visible=True)
            elif choice == "Delete Chat":
                if sid:
                    session_manager.delete_session(sid)
                return gr.update(visible=False), gr.update(visible=False)
            elif choice == "Clear Chat":
                if sid:
                    session_manager.clear_session(sid)
                return gr.update(visible=False), gr.update(visible=False)
            return gr.update(visible=False), gr.update(visible=False)

        menu_dropdown.change(
            handle_menu,
            inputs=[menu_dropdown, current_session_id],
            outputs=[rename_panel, model_panel]
        )

        # Rename
        def handle_rename(sid, new_title):
            """Apply the new title and hide the rename panel."""
            chats, title = ui_rename_session(sid, new_title)
            return chats, title, gr.update(visible=False)

        btn_rename.click(
            handle_rename,
            inputs=[current_session_id, rename_input],
            outputs=[chat_list, chat_title, rename_panel]
        )
        btn_cancel_rename.click(
            lambda: gr.update(visible=False),
            outputs=[rename_panel]
        )

        # Model Panel
        btn_close_model.click(
            lambda: gr.update(visible=False),
            outputs=[model_panel]
        )

        def handle_quick_load_model(choice):
            """Load the model picked in the dropdown, then hide the panel.

            Dropdown labels look like "✓ Name (size)"; strip the check mark and
            the parenthesized suffix to recover the bare model name.
            """
            if not choice or choice == "No models available":
                return get_current_model_display(), gr.update(visible=False)
            name = choice.replace("✓ ", "").split(" (")[0]
            models = model_manager.get_available_models()
            for m in models:
                if m["name"] == name:
                    model_manager.load_model(m["id"])
                    break
            return get_current_model_display(), gr.update(visible=False)

        btn_load_model.click(
            handle_quick_load_model,
            inputs=[model_choices],
            outputs=[model_display, model_panel]
        )

        def refresh_model_choices():
            """Re-query available models for the dropdown."""
            choices = get_model_choices()
            return gr.update(choices=choices)

        btn_refresh_model_list.click(
            refresh_model_choices,
            outputs=[model_choices]
        )

        # Model Manager (Tools tab)
        btn_refresh_models.click(
            lambda: (get_models_table(), get_current_model_display()),
            outputs=[models_table, current_model_text]
        )
        models_table.select(
            ui_load_model_by_index,
            inputs=[models_table],
            outputs=[models_table, current_model_text, model_status]
        )
        btn_unload_model.click(
            ui_unload_model,
            outputs=[models_table, current_model_text, model_status]
        )

        # Install Default Model
        def handle_install_default():
            """Download the default (TinyLlama) model and refresh the tables."""
            result = download_default_model()
            return result, get_models_table(), get_current_model_display()

        btn_install_default.click(
            handle_install_default,
            outputs=[install_status, models_table, current_model_text]
        )

    # File Manager Events
    folder_tree.select(on_folder_tree_select, [folder_tree], [file_list, breadcrumb, status_bar, folder_tree])
    file_list.select(
        navigate_from_main,
        [file_list],
        [file_list, breadcrumb, status_bar, folder_tree, selected_item, gr.State(None), preview_txt, gr.State("")]
    )
    btn_up.click(go_up, outputs=[file_list, breadcrumb, status_bar, folder_tree])
    btn_home_fm.click(go_home, outputs=[file_list, breadcrumb, status_bar, folder_tree])
    btn_refresh_fm.click(refresh_all, outputs=[file_list, breadcrumb, status_bar, folder_tree])
    btn_new_folder.click(ui_create_folder, [new_name], [file_list, breadcrumb, status_bar, folder_tree])
    btn_new_file.click(ui_create_file, [new_name, new_content], [file_list, breadcrumb, status_bar, folder_tree])
    btn_delete_fm.click(ui_delete, [selected_item], [file_list, breadcrumb, status_bar, folder_tree])
    upload_area.change(upload_files, [upload_area], [file_list, breadcrumb, status_bar, folder_tree])
    search_input.submit(search_files, [search_input], [file_list, breadcrumb, status_bar])

    # HF Hub Events
    def get_installed_models_list():
        """Scan TEXT_MODELS_DIR for installed models.

        Same layout as get_initial_installed_models(): one
        [name, type, size, status] row per GGUF file or Transformers folder.
        """
        rows = []
        try:
            if not TEXT_MODELS_DIR.exists():
                return rows
            for d in TEXT_MODELS_DIR.iterdir():
                if d.is_dir():
                    gguf_files = list(d.glob("*.gguf"))
                    if gguf_files:
                        for f in gguf_files:
                            size = format_size(f.stat().st_size)
                            rows.append([f.stem, "GGUF", size, "Ready"])
                    elif (d / "config.json").exists():
                        total_size = sum(f.stat().st_size for f in d.rglob("*") if f.is_file())
                        rows.append([d.name, "Transformers", format_size(total_size), "Ready"])
            # Loose .gguf files directly under the models directory.
            for f in TEXT_MODELS_DIR.glob("*.gguf"):
                size = format_size(f.stat().st_size)
                rows.append([f.stem, "GGUF", size, "Ready"])
        except Exception as e:
            logger.error("HFHub", f"List installed error: {str(e)}")
        return rows

    def refresh_installed():
        """Refresh the installed-models table and the quick-load dropdown."""
        rows = get_installed_models_list()
        choices = get_model_choices() if TEXTAI_AVAILABLE else []
        return rows, gr.update(choices=choices)

    btn_refresh_installed.click(
        refresh_installed,
        outputs=[installed_models_table, model_choices]
    )

    def select_installed_model(evt: gr.SelectData, data):
        """Return the name (column 0) of the clicked installed-model row."""
        try:
            # evt.index may be (row, col) or a bare row index depending on Gradio version.
            row_idx = evt.index[0] if isinstance(evt.index, (list, tuple)) else evt.index
            # Dataframe value may arrive as a pandas DataFrame or a plain list.
            if hasattr(data, 'values'):
                data_list = data.values.tolist()
            else:
                data_list = data
            if data_list and row_idx < len(data_list):
                return data_list[row_idx][0]
        except Exception as e:
            logger.error("HFHub", f"Select installed error: {str(e)}")
        return ""

    installed_models_table.select(
        select_installed_model,
        [installed_models_table],
        [selected_installed_model]
    )

    def delete_installed_model(model_name):
        """Remove the selected model from disk and refresh the listings."""
        if not model_name:
            return get_installed_models_list(), "", "Select a model first", gr.update()
        result = remove_model(model_name)
        rows = get_installed_models_list()
        choices = get_model_choices() if TEXTAI_AVAILABLE else []
        return rows, "", result, gr.update(choices=choices)

    btn_delete_installed.click(
        delete_installed_model,
        inputs=[selected_installed_model],
        outputs=[installed_models_table, selected_installed_model, action_status, model_choices]
    )

    def handle_hf_search(query, max_params_str):
        """Search HF Hub for GGUF text-generation models; "0" means no size cap."""
        max_params = float(max_params_str) if max_params_str != "0" else None
        rows, status, total = search_hf_models_enhanced(
            query=query,
            task="text-generation",
            library="gguf",
            sort="downloads",
            max_params=max_params,
            recommended_only=False,
            limit=30,
            offset=0
        )
        # Keep only the first five columns to match the results table headers.
        simple_rows = [[r[0], r[1], r[2], r[3], r[4]] for r in rows]
        return simple_rows, status

    btn_hf_search.click(
        handle_hf_search,
        [hf_query, hf_max_params],
        [hf_results, hf_status]
    )
    hf_query.submit(
        handle_hf_search,
        [hf_query, hf_max_params],
        [hf_results, hf_status]
    )

    def handle_select_model(evt: gr.SelectData, results_data):
        """Return the Model ID (column 0) of the clicked search-result row."""
        try:
            row_idx = evt.index[0] if isinstance(evt.index, (list, tuple)) else evt.index
            if hasattr(results_data, 'values'):
                data_list = results_data.values.tolist()
            elif hasattr(results_data, 'tolist'):
                data_list = results_data.tolist()
            else:
                data_list = results_data
            if data_list and row_idx < len(data_list):
                return data_list[row_idx][0]
        except Exception as e:
            logger.error("HFHub", f"Select error: {str(e)}")
        return ""

    hf_results.select(handle_select_model, [hf_results], [selected_model_id])

    def handle_view_files(model_id):
        """List the GGUF files of the selected model, recommended quants first."""
        if not model_id:
            return gr.update(visible=False), [], "", "Select a model first"
        files = get_model_files(model_id)
        if not files:
            return gr.update(visible=False), [], "", "No GGUF files found"
        rows = []
        for f in files:
            rec = "✓" if f["recommended"] else ""
            rows.append([f["filename"], f["quant"], rec])
        # Recommended ("✓") rows sort first, then alphabetically by filename.
        rows.sort(key=lambda x: (x[2] != "✓", x[0]))
        return gr.update(visible=True), rows, "", f"Found {len(files)} files"

    btn_view_files.click(
        handle_view_files,
        [selected_model_id],
        [file_panel, file_list_hub, selected_file, action_status]
    )

    def handle_select_file(evt: gr.SelectData, files_data):
        """Return the filename (column 0) of the clicked quantization row."""
        try:
            row_idx = evt.index[0] if isinstance(evt.index, (list, tuple)) else evt.index
            if hasattr(files_data, 'values'):
                data_list = files_data.values.tolist()
            elif hasattr(files_data, 'tolist'):
                data_list = files_data.tolist()
            else:
                data_list = files_data
            if data_list and row_idx < len(data_list):
                return data_list[row_idx][0]
        except Exception as e:
            logger.error("HFHub", f"File select error: {str(e)}")
        return ""

    file_list_hub.select(handle_select_file, [file_list_hub], [selected_file])

    def handle_download_file(model_id, filename):
        """Download one specific GGUF file of the selected model."""
        if not model_id or not filename:
            return "Select model and file"
        result = download_model_file(model_id, filename)
        return result

    btn_download_file.click(
        handle_download_file,
        [selected_model_id, selected_file],
        [action_status]
    )
    btn_close_files.click(
        lambda: gr.update(visible=False),
        outputs=[file_panel]
    )

    def handle_auto_add(model_id):
        """Download the best available quantization for the selected model.

        Preference order is Q4_K_M > Q4_K_S > Q5_K_M > Q4_0 > Q5_0; falls back
        to the first listed file if none of those quants exist.
        """
        if not model_id:
            return "Select a model first", get_installed_models_list(), gr.update()
        files = get_model_files(model_id)
        if not files:
            return "No GGUF files found for this model", get_installed_models_list(), gr.update()
        best_file = None
        priority = ["Q4_K_M", "Q4_K_S", "Q5_K_M", "Q4_0", "Q5_0"]
        for quant in priority:
            for f in files:
                if f["quant"] == quant:
                    best_file = f["filename"]
                    break
            if best_file:
                break
        if not best_file and files:
            best_file = files[0]["filename"]
        if best_file:
            result = download_model_file(model_id, best_file)
            installed = get_installed_models_list()
            choices = get_model_choices() if TEXTAI_AVAILABLE else []
            return result, installed, gr.update(choices=choices)
        return "Could not find suitable file", get_installed_models_list(), gr.update()

    btn_auto_add.click(
        handle_auto_add,
        [selected_model_id],
        [action_status, installed_models_table, model_choices]
    )

    # System
    btn_sys_refresh.click(get_system_dashboard, outputs=[sys_info])
    btn_storage_refresh.click(get_storage_stats, outputs=[storage_info])

    # Logs
    btn_refresh_logs.click(ui_refresh_logs, [log_type, log_limit], [logs_display])
    btn_clear_logs.click(ui_clear_logs, [log_type], [logs_display, gr.State("")])

    # API
    btn_ping.click(ping, outputs=[ping_result])
    btn_get_errors.click(lambda: api_get_logs("errors", 50), outputs=[error_logs])

# ══════════════════════════════════════════════════════════════════════════════
# LAUNCH
# ══════════════════════════════════════════════════════════════════════════════
if __name__ == "__main__":
    logger.info("System", "TextAI v2 starting", {"version": VERSION})
    demo.launch(theme=get_theme(), css=DARK_CSS + CHAT_CSS)