"""the idea is to embed all KBOB categories as vectores. | |
then when a new document in added, we do a sim search with the doc vector in the KBOB vectores | |
to map/classify. can be done in multiple steps. """ | |
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the model and tokenizer
model_name = "https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1" | |
model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1" # gated | |
model_name = "mistral-community/Mistral-7B-v0.2" | |
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Function to generate text based on a prompt
def generate_text(llm_prompt):
    inputs = tokenizer(llm_prompt, return_tensors="pt")
    outputs = model.generate(inputs["input_ids"], max_length=100)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Set up the Streamlit interface
st.title("Mistral 7B Text Generation App")
prompt = st.text_area("Enter your prompt:", "Once upon a time")

if st.button("Generate"):
    with st.spinner('Generating...'):
        generated_text = generate_text(prompt)
    st.text_area("Generated Text:", generated_text, height=300)