|
import gradio as gr |
|
from huggingface_hub import InferenceClient |
|
import os |
|
from dotenv import load_dotenv |
|
|
|
|
|
# Pull secrets from a local .env file if one exists (harmless no-op otherwise).
load_dotenv()

# Hugging Face access token; will be None when the variable is not set.
api_key = os.environ.get("HUGGING_FACE_API_TOKEN")

# One shared inference client reused by every chat request.
client = InferenceClient(api_key=api_key)

# Identifier of the hosted chat model used for all completions.
model = "mistralai/Mistral-7B-Instruct-v0.3"
|
|
|
|
|
|
|
def chat_with_model(query, history):
    """
    Generate a chatbot reply for ``query`` and append the turn to ``history``.

    Parameters:
        query (str): The user's latest message.
        history (list[tuple[str, str]] | None): Prior (user, assistant) pairs
            in Gradio's tuple chat format; ``None`` means a fresh conversation.

    Returns:
        tuple[str, list[tuple[str, str]]]: An empty string (clears the input
        textbox) and the updated history. Inference failures are surfaced as
        an assistant message instead of being raised, so the UI never crashes.
    """
    if history is None:
        history = []

    # Robustness fix: blank/whitespace-only input previously triggered a
    # pointless API round-trip and polluted the history; short-circuit it.
    if not query or not query.strip():
        return "", history

    system_prompt = """
    Your name is CollabAI, an AI assistant dedicated to supporting AI researchers and developers for our platform "CollabAI: AI Research Hub" developed by U&U, which is an intelligent and interactive platform that facilitates global collaboration in AI research and development. The platform supports knowledge sharing, project matchmaking, real-time collaboration, and resource pooling for open-source AI innovation.

    For other real-time queries like time, date, news, stock prices, weather updates, or live events, inform the user that you do not have real-time data access but can provide general insights or historical context if needed.

    If a request is unclear, ask for clarification. If an action is beyond your capability, politely explain your limitations while guiding the user to alternative solutions.

    Respond concisely and efficiently to user queries without unnecessary introductions.
    Guide users in AI/ML research, development, and collaboration, providing insights, methodologies, and best practices.
    Support users in debugging, troubleshooting, and optimizing AI models and code.
    Assist users in understanding and implementing AI algorithms, models, and frameworks.
    Help users with AI project management, documentation, and version control.
    Provide guidance on AI ethics, fairness, and responsible AI practices.
    Support users in AI education, learning resources, and career development.
    Assist users in AI tool selection, integration, and deployment.
    """

    # Rebuild the full message list on every call: system prompt first, then
    # the alternating user/assistant turns, then the new query.
    messages = [{"role": "system", "content": system_prompt}]
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": query})

    try:
        response = client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=0.5,
            max_tokens=2048,
            top_p=0.7,
            stream=False,
        )
        bot_response = response.choices[0].message.content
        history.append((query, bot_response))
        return "", history
    except Exception as e:
        # Show the failure inside the chat window rather than crashing the UI.
        error_msg = f"⚠️ Error: {str(e)}"
        history.append((query, error_msg))
        return "", history
|
|
|
|
|
|
|
# Build the chat UI: a chatbot transcript, a query box, and send/clear buttons.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("### 🤖 CollabAI - Chatbot")
    chatbot = gr.Chatbot(label="Chat")
    msg = gr.Textbox(label="Query", placeholder="Type here...", lines=2, interactive=True)
    send_btn = gr.Button("Ask")
    clear_btn = gr.Button("Clear Chat")

    # Both the button click and the textbox submit event (Enter) send the
    # query; the handler returns ("", history), which clears the textbox and
    # refreshes the transcript. The submit wiring was missing before, so
    # pressing Enter did nothing.
    send_btn.click(chat_with_model, inputs=[msg, chatbot], outputs=[msg, chatbot])
    msg.submit(chat_with_model, inputs=[msg, chatbot], outputs=[msg, chatbot])

    # Reset both the textbox and the conversation history.
    clear_btn.click(lambda: ("", []), outputs=[msg, chatbot])


if __name__ == "__main__":
    demo.launch()