# Load model directly
import streamlit as st
import transformers


@st.cache_resource
def load_model():
    """Load the tokenizer and model once and cache them across reruns."""
    tokenizer = transformers.AutoTokenizer.from_pretrained("microsoft/phi-2")
    model = transformers.AutoModelForCausalLM.from_pretrained("microsoft/phi-2")
    return tokenizer, model


st.title("A Simple Interface for a Language Model")

st.subheader("Input Text")
input_text = st.text_area("Enter your text here", "Type something here...")

if st.button("Generate Response"):
    # Initialize (cached) tokenizer and model
    tokenizer, model = load_model()

    # Encode the input text as PyTorch tensors
    inputs = tokenizer(input_text, return_tensors="pt")

    # Generate a response of up to 100 tokens with sampling
    response = model.generate(**inputs, max_length=100, do_sample=True)

    # Decode and display the response
    st.subheader("Generated Response")
    st.write(tokenizer.decode(response[0], skip_special_tokens=True))