code-review / sample.py
import os
import gradio as gr
from transformers import pipeline
# Get the Hugging Face token from environment variables
token = os.getenv("HUGGINGFACE_TOKEN")
if token is None:
    raise ValueError("Hugging Face token is not set in the environment variables.")

# Load the model from the Hugging Face Model Hub with authentication
generator = pipeline('text-generation', model='bigcode/starcoder', use_auth_token=token)
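# Note: bigcode/starcoder is a gated checkpoint, so the account behind the token
# must have accepted the model's license on the Hub. Recent transformers releases
# also prefer passing the token via `token=` rather than the deprecated
# `use_auth_token=`; the original parameter is kept here as written.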
# Define the prediction function
def generate_text(prompt):
    result = generator(prompt, max_length=50)
    return result[0]['generated_text']
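
# Quick sanity check (a sketch; assumes the model downloaded and the token is valid):
# generate_text("def fibonacci(n):")
# -> returns the prompt continued by the model, up to 50 tokens in total (max_length)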
# Create the Gradio interface
iface = gr.Interface(fn=generate_text, inputs="text", outputs="text")
# Launch the app
iface.launch()
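
# Running the app locally (a sketch; hf_xxx stands for your own Hugging Face access token):
#   export HUGGINGFACE_TOKEN=hf_xxx
#   python sample.py
# Gradio prints a local URL (http://127.0.0.1:7860 by default) to open in a browser.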