import streamlit as st
from transformers import pipeline
import os
# Streamlit app title
st.title("LLaMA-2 Prompt Interface")

# Load the gated Llama-2 7B model; the Hugging Face access token is read from
# the HF_TOKEN environment variable ("token" replaces the deprecated
# "use_auth_token" argument).
generator = pipeline('text-generation', model='meta-llama/Llama-2-7b-hf', token=os.getenv("HF_TOKEN"))
user_prompt = st.text_input("Enter your prompt:", "The first man on the moon was ...")
if st.button("Generate Response"):
    # Run the model on the user's prompt
    response = generator(user_prompt, max_length=50)
    # Display the result in the Streamlit app
    st.write("Response:")
    st.write(response)
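    # Added sketch: the text-generation pipeline returns a list of dicts like
    # [{'generated_text': '...'}], so the line above prints the raw structure;
    # this additionally shows just the generated string.
    st.write(response[0]["generated_text"])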