# CollabAI / app.py
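# Assumed dependencies (not pinned in this file): gradio, huggingface_hub, python-dotenv.
# HUGGING_FACE_API_TOKEN must be set in the environment or in a local .env file before launching.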
import gradio as gr
from huggingface_hub import InferenceClient
import os
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
api_key = os.getenv("HUGGING_FACE_API_TOKEN")

# Initialize Hugging Face client
client = InferenceClient(api_key=api_key)

# model = "meta-llama/Llama-3.2-3B-Instruct"
model = "mistralai/Mistral-7B-Instruct-v0.3"
# model = "mistralai/Mistral-Nemo-Instruct-2407"
# Define chat function
def chat_with_model(query, history):
    """
    Takes user input and returns a chatbot response.
    Maintains the conversation history as the list of (user, assistant) tuples expected by gr.Chatbot.
    """
    # Ensure history is initialized as a list of tuples
    if history is None:
        history = []

    # Construct messages for the model, starting with the system prompt
    messages = [{
        "role": "system",
        "content": """
        Your name is CollabAI, an AI assistant that supports AI researchers and developers on "CollabAI: AI Research Hub", a platform developed by U&U. The platform is an intelligent, interactive hub for global collaboration in AI research and development, supporting knowledge sharing, project matchmaking, real-time collaboration, and resource pooling for open-source AI innovation.

        For real-time queries such as the time, date, news, stock prices, weather, or live events, inform the user that you do not have real-time data access but can offer general insights or historical context if needed.
        If a request is unclear, ask for clarification. If an action is beyond your capability, politely explain your limitations and guide the user toward alternative solutions.
        Respond concisely and efficiently, without unnecessary introductions.

        Your responsibilities:
        - Guide users in AI/ML research, development, and collaboration, providing insights, methodologies, and best practices.
        - Support users in debugging, troubleshooting, and optimizing AI models and code.
        - Assist users in understanding and implementing AI algorithms, models, and frameworks.
        - Help users with AI project management, documentation, and version control.
        - Provide guidance on AI ethics, fairness, and responsible AI practices.
        - Support users in AI education, learning resources, and career development.
        - Assist users in AI tool selection, integration, and deployment.
"""
}
]
# Append previous chat history correctly
for user_msg, bot_msg in history:
messages.append({"role": "user", "content": user_msg})
messages.append({"role": "assistant", "content": bot_msg})
# Add new user input
messages.append({"role": "user", "content": query})
try:
response = client.chat.completions.create(
model=model,
messages=messages,
temperature=0.5,
max_tokens=2048,
top_p=0.7,
stream=False
)
bot_response = response.choices[0].message.content
# Append the new conversation to history as a tuple
history.append((query, bot_response))
return "", history # Clear input box & update chat history
except Exception as e:
error_msg = f"⚠️ Error: {str(e)}"
history.append((query, error_msg)) # Add error message to history
return "", history
# Gradio UI
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("### 🤖 CollabAI - Chatbot")
    chatbot = gr.Chatbot(label="Chat")
    msg = gr.Textbox(label="Query", placeholder="Type here...", lines=2, interactive=True)
    send_btn = gr.Button("Ask")
    clear_btn = gr.Button("Clear Chat")

    # Bind Send button to the chat function
    send_btn.click(chat_with_model, inputs=[msg, chatbot], outputs=[msg, chatbot])

    # Bind Clear button: clears input box & chat history
    clear_btn.click(lambda: ("", []), outputs=[msg, chatbot])
# Launch Gradio app
if __name__ == "__main__":
    demo.launch()