"""Minimal Gradio chat UI backed by a Groq-hosted Llama 3 model via LangChain."""

import os
from functools import lru_cache

import gradio as gr
from langchain.chat_models import init_chat_model

# API key read once at import time; None if the env var is unset.
# NOTE(review): env var is named "groqdummy" — confirm this is intentional
# rather than the conventional GROQ_API_KEY.
key = os.getenv("groqdummy")


@lru_cache(maxsize=1)
def _get_model():
    """Build the ChatGroq model once and reuse it for every request.

    The original code re-instantiated the model on every chat turn; caching
    the instance avoids that per-call construction overhead.
    """
    return init_chat_model("llama3-8b-8192", model_provider="groq", api_key=key)


def groq_endpoint(message, history):
    """Gradio chat handler: send *message* to the model, return the reply text.

    Parameters
    ----------
    message : str
        The user's latest message, supplied by gr.ChatInterface.
    history : list
        Prior conversation turns, supplied by gr.ChatInterface. It is not
        forwarded to the model, so each turn is answered without context.

    Returns
    -------
    str
        The model's reply content, coerced to str.
    """
    result = _get_model().invoke(message)
    return str(result.content)


# Hide Gradio's auto-generated block labels and the footer for a cleaner page.
css = """
label[data-testid="block-label"] { display: none !important; }
footer { display: none !important; }
"""

# Force the dark theme: append ?__theme=dark to the URL and reload once.
js_func = """
function refresh() {
    const url = new URL(window.location);
    if (url.searchParams.get('__theme') !== 'dark') {
        url.searchParams.set('__theme', 'dark');
        window.location.href = url.href;
    }
}
"""

with gr.Blocks(css=css, js=js_func, theme="monochrome") as demo:
    chatbot = gr.ChatInterface(
        fn=groq_endpoint,
        type="messages",
        examples=["hello", "merhaba"],
        title="DS-10_Chatbot",
    )

demo.launch(debug=True)