llama3.1-groq / app.py
gauri-sharan's picture
Create app.py
acfbff8 verified
raw
history blame contribute delete
505 Bytes
import gradio as gr
from groq import Groq
# Module-level Groq API client shared by all requests.
# NOTE(review): no key is passed here — presumably the SDK picks up
# credentials from the environment (GROQ_API_KEY); confirm in deployment config.
client = Groq()
def generate_content(prompt: str, model: str = "llama-3.1-8b-instant") -> str:
    """Generate a chat completion for *prompt* via the Groq API.

    Args:
        prompt: User text, sent as a single user-role message.
        model: Groq model id. Defaults to the previously hard-coded
            "llama-3.1-8b-instant", so existing callers are unaffected.

    Returns:
        The text content of the first completion choice.
    """
    response = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model=model,
        # Sampling settings kept exactly as the original deployment used.
        temperature=0.7,
        max_tokens=1000,
        top_p=0.9,
        frequency_penalty=0,
    )
    return response.choices[0].message.content
# Minimal Gradio UI: one text box in, the generated text out.
iface = gr.Interface(
    fn=generate_content,
    inputs="text",
    outputs="text",
)
iface.launch()