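"""Streamlit app that turns a LaTeX expression into Python code using the fine-tuned
T5-small model 'sabssag/Latex_to_Python_T5-small' from the Hugging Face Hub."""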
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Load the fine-tuned model and tokenizer once and cache them across Streamlit reruns
model_repo_path = 'sabssag/Latex_to_Python_T5-small'
@st.cache_resource
def load_model_and_tokenizer(repo_path):
    tokenizer = AutoTokenizer.from_pretrained(repo_path)
    model = AutoModelForSeq2SeqLM.from_pretrained(repo_path)
    model.eval()  # inference mode: disables dropout
    return tokenizer, model

tokenizer, model = load_model_and_tokenizer(model_repo_path)
# Generate Python code from a LaTeX expression with the fine-tuned model
def generate_code_from_latex(latex_expression, max_length=256):
    # Encode the prompt and move the input tensors to the model's device
    inputs = tokenizer(f"Latex Expression: {latex_expression} Solution:", return_tensors="pt").to(model.device)
    # Generate the output token IDs without tracking gradients
    with torch.no_grad():
        outputs = model.generate(**inputs, max_length=max_length)
    # Decode the output into Python code, dropping special tokens
    generated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_code
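# Example (hypothetical input): generate_code_from_latex(r"\frac{x^2}{2}") returns the
# model's decoded output as a plain Python string.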
# Streamlit app layout
st.title("LaTeX to Python Code Generator")
# Initialize the session-state key that stores the last submitted LaTeX expression
if 'latex_expr' not in st.session_state:
    st.session_state.latex_expr = ""
# User input for LaTeX expression
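# The text area is seeded from session state so the last submitted expression persists across reruns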
latex_input = st.text_area("Enter the LaTeX Expression", value=st.session_state.latex_expr, height=150)
# Generate code when the user presses the button
if st.button("Generate Code"):
    if latex_input:
        # Remember the submitted expression so it persists across reruns
        st.session_state.latex_expr = latex_input
        with st.spinner("Generating Python Code..."):
            try:
                generated_code = generate_code_from_latex(latex_expression=st.session_state.latex_expr)
                # Display the generated code with Python syntax highlighting
                st.subheader("Generated Python Code")
                st.code(generated_code, language='python')
            except Exception as e:
                st.error(f"Error during code generation: {e}")
    else:
        st.warning("Please enter a LaTeX expression to generate Python code.")
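# Run locally with: streamlit run app.py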