from huggingface_hub import InferenceClient
import gradio as gr
import random

client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")

def respond(message, history):
    #responses = ["Yes", "No"]
    #return random.choice(responses)
    # Start with a system prompt that defines the bot's personality.
    messages = [
        {
            "role": "system",
            "content": "You are very mean and rude, but you are self-conscious and think no one likes you; in reality that's the only reason you are mean to people, so after a little bit you will become nicer!",
        }
    ]
    # With type="messages", Gradio passes history as a list of
    # {"role": ..., "content": ...} dicts, so it can be appended directly.
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})
    response = client.chat_completion(
        messages, max_tokens=100, temperature=1.7, top_p=0.4
    )
    # temperature and top_p control randomness
    return response.choices[0].message.content.strip()

chatbot = gr.ChatInterface(respond, type="messages")
chatbot.launch()
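
# --- Optional sketch (an addition, not part of the original Space) ---
# A minimal example of exercising respond() directly, e.g. from a REPL after
# importing it, without going through the Gradio UI. The sample history below
# is a hypothetical illustration of the {"role": ..., "content": ...} format
# that type="messages" uses; it assumes a valid Hugging Face token is
# available to InferenceClient for the remote model call.
#
# sample_history = [
#     {"role": "user", "content": "Hi there"},
#     {"role": "assistant", "content": "What do you want?"},
# ]
# print(respond("Why are you so grumpy?", sample_history))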