# app.py — Medical-summary Streamlit demo.
# (Header preserved from the Hugging Face file view it was copied from:
#  author Mahalingam, commit "Create app.py", hash a6bf2ce, 1.48 kB.
#  Converted to comments so the file is valid Python.)
import streamlit as st
from transformers import BartForConditionalGeneration, BartTokenizer
# Load the fine-tuned summarization model and its tokenizer once at module
# import time, so Streamlit reruns of main() reuse them instead of reloading.
model_path = "disilbart-med-summary" # Replace with the actual path
# NOTE(review): path spelling suggests "distilbart" — confirm the directory
# name on disk matches before deploying.
tokenizer = BartTokenizer.from_pretrained(model_path)
model = BartForConditionalGeneration.from_pretrained(model_path)
# Function to generate summary based on input
def generate_summary(input_text, max_input_tokens=1024):
    """Summarize *input_text* with the module-level BART model.

    Args:
        input_text: Raw text to summarize.
        max_input_tokens: Truncation cap for the encoded input. BART-family
            models have 1024 position embeddings, so longer inputs would
            raise an indexing error without truncation.

    Returns:
        The decoded summary string (special tokens stripped).
    """
    # Tokenize the input text; truncate so we never exceed the encoder's
    # positional-embedding limit (previously an over-long input crashed here).
    input_ids = tokenizer.encode(
        input_text,
        return_tensors="pt",
        truncation=True,
        max_length=max_input_tokens,
    )
    # Generate summary. max_length is capped at 1024 (was 4000): the decoder
    # also has only 1024 positions, and asking for more can crash if no EOS
    # token is produced in time. Summaries in practice end well before this.
    summary_ids = model.generate(
        input_ids, max_length=1024, num_beams=4, no_repeat_ngram_size=2
    )
    # Decode the summary back to a plain string.
    summary_text = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary_text
# Streamlit app
def main():
    """Render the single-page summarization UI."""
    # Centered page heading (raw HTML, so unsafe_allow_html must be on).
    st.markdown("<h3 style='text-align: center; color: #333;'>Medical Summary - Text Generation</h3>", unsafe_allow_html=True)

    # Free-text input box for the source document.
    source_text = st.text_area("Enter Text:", "")

    # Nothing to do until the user clicks the button with some text entered.
    if not st.button("Generate Summary"):
        return
    if not source_text:
        return

    # Summarize and show the result in its own (word-wrapping) text area.
    summary = generate_summary(source_text)
    st.text_area("Generated Summary:", summary, key="generated_summary")
# Run the Streamlit app (entry point when executed via `streamlit run app.py`).
if __name__ == "__main__":
    main()