ofai-kai-backup / app.py
YaserDS-777's picture
Update app.py
1098c9d verified
raw
history blame
1.04 kB
import os
import gradio as gr
from transformers import pipeline
# Get the Hugging Face API token from environment variables.
api_token = os.getenv("HUGGINGFACE_API_TOKEN_V")

# Fail fast if the token is missing; name the exact variable we read so the
# error is actionable (the old message pointed at the wrong variable name).
if not api_token:
    raise ValueError("API token is not set. Please set the HUGGINGFACE_API_TOKEN_V environment variable.")

# NOTE(security): never print or log the token itself — it is a credential.
# The previous version echoed it to stdout, which leaks it into Space logs.

# Initialize the text generation pipeline, authenticating with the token we
# validated above. `use_auth_token` is deprecated in transformers; `token=`
# is the supported parameter and actually uses the value we read.
pipe = pipeline("text-generation", model="meta-llama/Meta-Llama-3.1-405B", token=api_token)
# Define the function to generate text
def generate_text(prompt, max_length=50):
    """Generate a text continuation for *prompt* with the module-level pipeline.

    Args:
        prompt: The input text to continue.
        max_length: Total token budget for the generated sequence
            (default 50, matching the original hard-coded value).

    Returns:
        The generated text of the first (and only) returned sequence.
    """
    # num_return_sequences=1 — we only surface a single completion in the UI.
    result = pipe(prompt, max_length=max_length, num_return_sequences=1)
    return result[0]['generated_text']
# Create a Gradio interface around the generation function.
# `gr.inputs.Textbox` was deprecated in Gradio 3.x and removed in 4.x;
# the component now lives at the top level as `gr.Textbox`.
iface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=2, placeholder="Enter your prompt here..."),
    outputs="text",
    title="Meta-Llama Text Generation",
    description="Generate text using the Meta-Llama 3.1 405B model."
)

# Launch the interface (blocking call; serves the web UI).
iface.launch()