import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import torch

# Load the model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("abdulwaheed1/urdu_to_english_translation_mbart")
model = AutoModelForSeq2SeqLM.from_pretrained("abdulwaheed1/urdu_to_english_translation_mbart")

# Function to translate Urdu text to English
def translate_urdu_to_english(urdu_text):
    try:
        # Tokenize the input Urdu text
        inputs = tokenizer(urdu_text, return_tensors="pt", padding=True, truncation=True)

        # Generate the translation with the model
        with torch.no_grad():
            translated_tokens = model.generate(**inputs, max_length=512)

        # Decode the generated tokens into English text
        translated_text = tokenizer.decode(translated_tokens[0], skip_special_tokens=True)
        return translated_text
    except Exception as e:
        # Return an error message if something goes wrong
        return f"Error in translation: {str(e)}"

# Set up the Gradio interface
iface = gr.Interface(
    fn=translate_urdu_to_english,                         # Function to call
    inputs=gr.Textbox(label="Enter Urdu Text"),           # Textbox for user input
    outputs=gr.Textbox(label="Translated English Text"),  # Textbox for displaying the output
    live=True  # Optionally enable live translation (i.e., translate as the user types)
)

# Launch the Gradio interface
iface.launch()
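
If the checkpoint is an mBART-50-style model, the tokenizer usually needs the source language set and generate() usually needs a forced BOS token for the target language, neither of which the script above configures. A minimal sketch of that configuration follows; it assumes the tokenizer is an mBART-50 tokenizer exposing the language codes "ur_PK" and "en_XX", which should be verified against the actual checkpoint.

# Sketch: explicit language configuration for an mBART-50-style tokenizer.
# Assumption: this checkpoint uses the mBART-50 codes "ur_PK" (Urdu) and "en_XX" (English).
tokenizer.src_lang = "ur_PK"  # tell the tokenizer the input text is Urdu

inputs = tokenizer("یہ ایک مثال ہے", return_tensors="pt", padding=True, truncation=True)
with torch.no_grad():
    translated_tokens = model.generate(
        **inputs,
        forced_bos_token_id=tokenizer.lang_code_to_id["en_XX"],  # force English as the output language
        max_length=512,
    )
print(tokenizer.decode(translated_tokens[0], skip_special_tokens=True))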