bakra / app.py
import streamlit as st
import transformers
import torch
import os
from transformers import AutoTokenizer, AutoModelForCausalLM
from huggingface_hub import login
# Retrieve the Hugging Face token from the environment variables
HUGGINGFACE_TOKEN = os.getenv("HUGGINGFACE_TOKEN")
if HUGGINGFACE_TOKEN is None:
    st.error("Hugging Face token is not set. Please add it to the environment variables.")
    st.stop()
# Authenticate using the token
login(HUGGINGFACE_TOKEN)
model_id = "meta-llama/Meta-Llama-3-8B"

# Load the model and tokenizer once and cache them across Streamlit reruns
@st.cache_resource
def load_generator():
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(
        model_id, torch_dtype=torch.bfloat16, device_map="auto"
    )
    return transformers.pipeline("text-generation", model=model, tokenizer=tokenizer)

generator = load_generator()
text = st.text_area("Ask me Anything")
if text:
    result = generator(text)
    # The pipeline returns a list of dicts; display only the generated text
    st.success(result[0]["generated_text"])
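# Note: the call above uses the pipeline's default generation settings. The
# text-generation pipeline forwards extra keyword arguments to model.generate(),
# so output length and sampling could be tuned, for example:
#   result = generator(text, max_new_tokens=256, do_sample=True, temperature=0.7)
# (the parameter values shown are illustrative, not taken from the original app)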