rbt2025 committed on
Commit
ba3cc2b
·
verified ·
1 Parent(s): 72abdff

Deploy TextAI v2 - Clean architecture

Browse files
Files changed (2) hide show
  1. app.py +52 -54
  2. ui/chat.py +7 -19
app.py CHANGED
@@ -21,30 +21,27 @@ def create_app():
21
  state.sync_with_filesystem()
22
  logger.info("App", f"TextAI v{VERSION} starting")
23
 
24
- with gr.Blocks(
25
- title=APP_TITLE,
26
- theme=get_theme(),
27
- css=THEME_CSS
28
- ) as app:
29
 
30
  with gr.Tabs() as main_tabs:
31
 
32
  # ══════════════════════════════════════════════════════════
33
  # TAB 1: CHAT
34
  # ══════════════════════════════════════════════════════════
35
- with gr.Tab("Chat", id=0):
36
  chat_components = build_chat_ui()
37
 
38
  # ══════════════════════════════════════════════════════════
39
  # TAB 2: MODELS
40
  # ══════════════════════════════════════════════════════════
41
- with gr.Tab("Models", id=1):
42
  model_components = build_model_manager_ui()
43
 
44
  # ══════════════════════════════════════════════════════════
45
  # TAB 3: TOOLS
46
  # ══════════════════════════════════════════════════════════
47
- with gr.Tab("Tools", id=2):
48
  with gr.Tabs():
49
 
50
  # System Info
@@ -110,28 +107,28 @@ def create_app():
110
  with gr.Tab("API"):
111
  gr.Markdown("### API Endpoints")
112
  gr.Markdown("""
113
- All endpoints return JSON. Use for integration with local apps.
114
-
115
- **Models:**
116
- - `api_list_models()` - List installed models
117
- - `api_load_model(model_id)` - Load a model
118
- - `api_unload_model()` - Unload current model
119
- - `api_search_models(query, max_params, limit)` - Search HF
120
- - `api_download_model(repo_id, filename)` - Download model
121
-
122
- **Sessions:**
123
- - `api_list_sessions()` - List all sessions
124
- - `api_create_session(title, type, system_prompt)` - New session
125
- - `api_delete_session(session_id)` - Delete session
126
-
127
- **Chat:**
128
- - `api_chat(session_id, message, max_tokens, temperature)` - Chat
129
- - `api_inference(prompt, messages, system_prompt, ...)` - Direct inference
130
-
131
- **System:**
132
- - `api_health()` - Health check
133
- - `api_get_status()` - Full status
134
- - `api_get_backends()` - Available backends
135
  """)
136
 
137
  gr.Markdown("### Test")
@@ -146,43 +143,42 @@ def create_app():
146
  # ══════════════════════════════════════════════════════════
147
  # TAB 4: ABOUT
148
  # ══════════════════════════════════════════════════════════
149
- with gr.Tab("About", id=3):
150
  gr.Markdown(f"""
151
- # TextAI v{VERSION}
152
 
153
- **Local AI Chat Assistant**
154
 
155
- A clean, professional chat interface for running local LLM models.
156
- Inspired by ChatGPT & Grok.
157
 
158
- ---
159
 
160
- ### Features
161
 
162
- 🤖 **Chat** - Natural conversation with AI
163
- - πŸ“ **Session Management** - Auto-save, rename, delete chats
164
- 🔧 **Model Manager** - Download, load, configure models
165
- 🔍 **HuggingFace Search** - Find and download GGUF models
166
- 🎛️ **Customization** - System prompts, temperature, tokens
167
- 📑 **API** - Full API for integration with other apps
168
 
169
- ---
170
 
171
- ### Supported Models
172
 
173
- - **GGUF** - Via llama-cpp-python (recommended)
174
- - **Transformers** - Via HuggingFace transformers
175
 
176
- ---
177
 
178
- ### Keyboard Shortcuts
179
 
180
- - `Enter` - Send message
181
- - `Shift+Enter` - New line
182
 
183
- ---
184
 
185
- Built with ❤️ using Gradio
186
  """)
187
 
188
  return app
@@ -197,5 +193,7 @@ if __name__ == "__main__":
197
  app.launch(
198
  server_name="0.0.0.0",
199
  server_port=7860,
200
- share=False
 
 
201
  )
 
21
  state.sync_with_filesystem()
22
  logger.info("App", f"TextAI v{VERSION} starting")
23
 
24
+ # Gradio 6.0: theme/css moved to launch(), but we use Blocks() for structure
25
+ with gr.Blocks(title=APP_TITLE) as app:
 
 
 
26
 
27
  with gr.Tabs() as main_tabs:
28
 
29
  # ══════════════════════════════════════════════════════════
30
  # TAB 1: CHAT
31
  # ══════════════════════════════════════════════════════════
32
+ with gr.Tab("Chat", id="chat"):
33
  chat_components = build_chat_ui()
34
 
35
  # ══════════════════════════════════════════════════════════
36
  # TAB 2: MODELS
37
  # ══════════════════════════════════════════════════════════
38
+ with gr.Tab("Models", id="models"):
39
  model_components = build_model_manager_ui()
40
 
41
  # ══════════════════════════════════════════════════════════
42
  # TAB 3: TOOLS
43
  # ══════════════════════════════════════════════════════════
44
+ with gr.Tab("Tools", id="tools"):
45
  with gr.Tabs():
46
 
47
  # System Info
 
107
  with gr.Tab("API"):
108
  gr.Markdown("### API Endpoints")
109
  gr.Markdown("""
110
+ All endpoints return JSON. Use for integration with local apps.
111
+
112
+ **Models:**
113
+ - `api_list_models()` - List installed models
114
+ - `api_load_model(model_id)` - Load a model
115
+ - `api_unload_model()` - Unload current model
116
+ - `api_search_models(query, max_params, limit)` - Search HF
117
+ - `api_download_model(repo_id, filename)` - Download model
118
+
119
+ **Sessions:**
120
+ - `api_list_sessions()` - List all sessions
121
+ - `api_create_session(title, type, system_prompt)` - New session
122
+ - `api_delete_session(session_id)` - Delete session
123
+
124
+ **Chat:**
125
+ - `api_chat(session_id, message, max_tokens, temperature)` - Chat
126
+ - `api_inference(prompt, messages, system_prompt, ...)` - Direct inference
127
+
128
+ **System:**
129
+ - `api_health()` - Health check
130
+ - `api_get_status()` - Full status
131
+ - `api_get_backends()` - Available backends
132
  """)
133
 
134
  gr.Markdown("### Test")
 
143
  # ══════════════════════════════════════════════════════════
144
  # TAB 4: ABOUT
145
  # ══════════════════════════════════════════════════════════
146
+ with gr.Tab("About", id="about"):
147
  gr.Markdown(f"""
148
+ # TextAI v{VERSION}
149
 
150
+ **Local AI Chat Assistant**
151
 
152
+ A clean, professional chat interface for running local LLM models.
 
153
 
154
+ ---
155
 
156
+ ### Features
157
 
158
+ - Chat - Natural conversation with AI
159
+ - Session Management - Auto-save, rename, delete chats
160
+ - Model Manager - Download, load, configure models
161
+ - HuggingFace Search - Find and download GGUF models
162
+ - Customization - System prompts, temperature, tokens
163
+ - API - Full API for integration with other apps
164
 
165
+ ---
166
 
167
+ ### Supported Models
168
 
169
+ - **GGUF** - Via llama-cpp-python (recommended)
170
+ - **Transformers** - Via HuggingFace transformers
171
 
172
+ ---
173
 
174
+ ### Keyboard Shortcuts
175
 
176
+ - `Enter` - Send message
177
+ - `Shift+Enter` - New line
178
 
179
+ ---
180
 
181
+ Built with Gradio
182
  """)
183
 
184
  return app
 
193
  app.launch(
194
  server_name="0.0.0.0",
195
  server_port=7860,
196
+ share=False,
197
+ theme=get_theme(),
198
+ css=THEME_CSS
199
  )
ui/chat.py CHANGED
@@ -44,20 +44,13 @@ def build_chat_ui():
44
  model = service.get_loaded_model()
45
  return f"● {model['name']}" if model else "No model loaded"
46
 
47
- def format_history(session_id: str) -> List[dict]:
48
  """Format session messages for Gradio chatbot"""
49
  service = get_session_service()
50
  display = service.get_session_for_display(session_id)
51
  if not display:
52
  return []
53
- # Convert to new message format
54
- messages = []
55
- for user_msg, bot_msg in display.get("history", []):
56
- if user_msg:
57
- messages.append({"role": "user", "content": user_msg})
58
- if bot_msg:
59
- messages.append({"role": "assistant", "content": bot_msg})
60
- return messages
61
 
62
  # ══════════════════════════════════════════════════════════════════
63
  # EVENT HANDLERS
@@ -111,10 +104,7 @@ def build_chat_ui():
111
 
112
  # Check model
113
  if not model_service.is_model_loaded():
114
- history = history + [
115
- {"role": "user", "content": message},
116
- {"role": "assistant", "content": "⚠️ Please load a model first. Go to Models tab."}
117
- ]
118
  return history, "", session_id, get_sessions_for_sidebar()
119
 
120
  # Create session if needed
@@ -122,8 +112,7 @@ def build_chat_ui():
122
  session = session_service.create_session(system_prompt=system_prompt)
123
  session_id = session["id"]
124
 
125
- # Add user message
126
- history = history + [{"role": "user", "content": message}]
127
  session_service.add_message(session_id, "user", message)
128
 
129
  # Build messages for model
@@ -138,10 +127,10 @@ def build_chat_ui():
138
  try:
139
  response = model_service.generate(messages, max_tokens, temperature)
140
  session_service.add_message(session_id, "assistant", response)
141
- history = history + [{"role": "assistant", "content": response}]
142
  except Exception as e:
143
  logger.error("Chat", f"Generation error: {e}")
144
- history = history + [{"role": "assistant", "content": f"⚠️ Error: {e}"}]
145
 
146
  return history, "", session_id, get_sessions_for_sidebar()
147
 
@@ -199,7 +188,7 @@ def build_chat_ui():
199
  interactive=False,
200
  show_label=False,
201
  row_count=10,
202
- col_count=(2, "fixed"),
203
  elem_id="sessions-table"
204
  )
205
 
@@ -237,7 +226,6 @@ def build_chat_ui():
237
  show_label=False,
238
  height=400,
239
  elem_id="chatbot",
240
- type="messages",
241
  autoscroll=False
242
  )
243
 
 
44
  model = service.get_loaded_model()
45
  return f"● {model['name']}" if model else "No model loaded"
46
 
47
+ def format_history(session_id: str) -> List[Tuple[str, str]]:
48
  """Format session messages for Gradio chatbot"""
49
  service = get_session_service()
50
  display = service.get_session_for_display(session_id)
51
  if not display:
52
  return []
53
+ return display.get("history", [])
 
 
 
 
 
 
 
54
 
55
  # ══════════════════════════════════════════════════════════════════
56
  # EVENT HANDLERS
 
104
 
105
  # Check model
106
  if not model_service.is_model_loaded():
107
+ history = history + [(message, "⚠️ Please load a model first. Go to Models tab.")]
 
 
 
108
  return history, "", session_id, get_sessions_for_sidebar()
109
 
110
  # Create session if needed
 
112
  session = session_service.create_session(system_prompt=system_prompt)
113
  session_id = session["id"]
114
 
115
+ # Add user message to session
 
116
  session_service.add_message(session_id, "user", message)
117
 
118
  # Build messages for model
 
127
  try:
128
  response = model_service.generate(messages, max_tokens, temperature)
129
  session_service.add_message(session_id, "assistant", response)
130
+ history = history + [(message, response)]
131
  except Exception as e:
132
  logger.error("Chat", f"Generation error: {e}")
133
+ history = history + [(message, f"⚠️ Error: {e}")]
134
 
135
  return history, "", session_id, get_sessions_for_sidebar()
136
 
 
188
  interactive=False,
189
  show_label=False,
190
  row_count=10,
191
+ column_count=(2, "fixed"),
192
  elem_id="sessions-table"
193
  )
194
 
 
226
  show_label=False,
227
  height=400,
228
  elem_id="chatbot",
 
229
  autoscroll=False
230
  )
231