# Luna / app.py
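"""Luna chat application.

Runs a Gradio chat UI backed by ModelHandler and, on a daemon thread, serves
the FastAPI app defined in apiServer.
"""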
import json
import logging
import threading

import gradio as gr
import uvicorn

from apiServer import app
from model_handler import ModelHandler
# Set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
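
# ModelHandler (model_handler.py) is assumed, based on how it is called below,
# to expose:
#   - character_prompt: the system prompt string (applied by the handler itself)
#   - generate_response(messages) -> str, where messages is a list of
#     {"role", "content"} dicts
#   - save_chat_history(history, filename) -> path of the written JSON file
#   - load_chat_history(path) -> list of {"role", "content"} dicts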
class ChatInterface:
    def __init__(self):
        self.model_handler = ModelHandler()
        self.chat_history = []
    def respond(self, message, history):
        """Process the user message and return the updated chat history."""
        history = history or []
        if not message or not message.strip():
            return history

        try:
            logger.debug(f"Input message: {message}")
            logger.debug(f"Input history: {history}")

            # Build the model input. The system prompt is added by ModelHandler
            # itself, so it is not duplicated here. Only the last 5 messages of
            # history are sent to keep the prompt short.
            messages = []
            for msg in history[-5:]:
                messages.append({"role": msg["role"], "content": msg["content"]})
            messages.append({"role": "user", "content": message.strip()})

            logger.debug(f"Formatted messages: {json.dumps(messages, indent=2)}")

            # Generate response
            response = self.model_handler.generate_response(messages)
            if not response:
                response = "I apologize, but I couldn't generate a response."
            logger.debug(f"Model response: {response}")

            # The Chatbot component is configured with type="messages", so the
            # handler returns the full history as role/content dictionaries.
            return history + [
                {"role": "user", "content": message.strip()},
                {"role": "assistant", "content": response},
            ]
        except Exception as e:
            logger.exception("Error in respond method")
            return history + [
                {"role": "user", "content": message},
                {"role": "assistant", "content": f"I apologize, but an error occurred while processing your message: {str(e)}"},
            ]
    def export_history(self, history, filename):
        """Export chat history to JSON."""
        if not filename:
            return "Please provide a filename for the export."
        try:
            filepath = self.model_handler.save_chat_history(history, filename)
            return f"Chat history exported to {filepath}"
        except Exception as e:
            return f"Error exporting chat history: {str(e)}"
    def import_history(self, file):
        """Import chat history from a JSON file."""
        if not file:
            return []
        try:
            history = self.model_handler.load_chat_history(file.name)
            if not history:
                return []
            # The Chatbot uses type="messages", so the imported history is
            # returned as a list of {"role", "content"} dictionaries rather
            # than user/assistant pairs.
            return [
                {"role": msg["role"], "content": msg["content"]}
                for msg in history
                if msg.get("role") in ("user", "assistant")
            ]
        except Exception:
            logger.exception("Error importing history")
            return []
    def create_interface(self):
        """Create the Gradio interface."""
        try:
            with gr.Blocks(title="Luna Chat") as interface:
                chatbot = gr.Chatbot(
                    label="Chat with Luna",
                    bubble_full_width=False,
                    height=600,
                    type="messages"  # expects a list of {"role", "content"} dicts
                )
                with gr.Row():
                    msg = gr.Textbox(
                        label="Your message",
                        placeholder="Type your message here...",
                        scale=9
                    )
                    submit = gr.Button("Send", scale=1)
                with gr.Accordion("Chat History Options", open=False):
                    with gr.Row():
                        export_name = gr.Textbox(
                            label="Export Filename",
                            placeholder="Enter filename for export"
                        )
                        export_btn = gr.Button("Export Chat")
                    with gr.Row():
                        import_file = gr.File(
                            label="Import Chat History",
                            file_types=[".json"]
                        )
                        import_btn = gr.Button("Import Chat")

                # Event handlers: respond() returns the updated history, then
                # the textbox is cleared.
                msg.submit(
                    self.respond,
                    [msg, chatbot],
                    [chatbot],
                    queue=False
                ).then(
                    lambda: "",
                    None,
                    [msg]
                )
                submit.click(
                    self.respond,
                    [msg, chatbot],
                    [chatbot],
                    queue=False
                ).then(
                    lambda: "",
                    None,
                    [msg]
                )
                export_btn.click(
                    self.export_history,
                    [chatbot, export_name],
                    None
                )
                import_btn.click(
                    self.import_history,
                    [import_file],
                    [chatbot]
                )
            return interface
        except Exception as e:
            logger.error(f"Error creating interface: {str(e)}")
            raise
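
# apiServer.app is assumed to be an ASGI (FastAPI) application importable by
# uvicorn; it is served on port 8000 alongside the Gradio UI on port 7860.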
def run_api_server():
    """Run the FastAPI server."""
    uvicorn.run(app, host="0.0.0.0", port=8000, log_level="info")
# Create and launch the interface
if __name__ == "__main__":
    try:
        # Start the FastAPI server in a background thread
        api_thread = threading.Thread(target=run_api_server, daemon=True)
        api_thread.start()

        # Start the Gradio interface in the main thread
        chat_interface = ChatInterface()
        interface = chat_interface.create_interface()
        interface.launch(
            server_name="0.0.0.0",
            server_port=7860,
            share=True,
            debug=True
        )
    except Exception as e:
        logger.error(f"Application startup failed: {str(e)}")
        raise