import gradio as gr
import openai
import os

# Point the legacy openai SDK (pre-1.0 interface) at Groq's OpenAI-compatible endpoint.
openai.api_key = os.getenv("GROQ_API_KEY")
openai.api_base = "https://api.groq.com/openai/v1"

def get_groq_response(message):
    try:
        response = openai.ChatCompletion.create(
            model="llama-3.1-70b-versatile",
            messages=[
                {"role": "system", "content": "You are a friendly, helpful assistant."},
                {"role": "user", "content": message},
            ],
        )
        return response.choices[0].message["content"]
    except Exception as e:
        return f"Error: {str(e)}"

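# Alternative sketch (assumption: openai>=1.0 is installed): the same Groq request
# written with the modern OpenAI client instead of the legacy module-level API above.
# The helper name get_groq_response_v1 is illustrative; it is not used by the app.
def get_groq_response_v1(message):
    from openai import OpenAI  # local import so the legacy-SDK path above keeps working

    client = OpenAI(
        api_key=os.getenv("GROQ_API_KEY"),
        base_url="https://api.groq.com/openai/v1",
    )
    response = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=[{"role": "user", "content": message}],
    )
    return response.choices[0].message.content
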
def chatbot(user_input, history=None):
    # Gradio's "state" input starts out as None, so create the list on first use.
    history = history or []
    bot_response = get_groq_response(user_input)
    history.append((user_input, bot_response))
    return history, history

chat_interface = gr.Interface(
    fn=chatbot,
    inputs=["text", "state"],
    outputs=["chatbot", "state"],
    live=False,
    title="My Chatbot",
    description="Chatbot at home",
)

chat_interface.launch()