# workout3/app.py
import streamlit as st
import logging
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
# Set the logger to display only CRITICAL messages
logging.basicConfig(level=logging.CRITICAL)
# Cache the model and tokenizer so they are loaded only once per session
# (st.cache_resource is the current replacement for the deprecated st.experimental_singleton)
@st.cache_resource
def load_model():
    model_name = "Abbeite/trail_wl"  # Replace with your actual model name
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return model, tokenizer
model, tokenizer = load_model()
# Function to generate text with the model
def generate_text(prompt):
    formatted_prompt = f"[INST] {prompt} [/INST]"  # Format the prompt according to your specification
    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_length=300)
    result = pipe(formatted_prompt)
    return result[0]['generated_text']
st.title("Interact with Your Model")
# User input
user_input = st.text_area("Enter your prompt:", "")
if st.button("Submit"):
    if user_input:
        # Generate text based on the input
        generated_text = generate_text(user_input)
        st.write(generated_text)
    else:
        st.write("Please enter a prompt.")
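
# Usage note: with streamlit, torch, and transformers installed, the app can be
# launched locally from this directory with:
#   streamlit run app.py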