import os

import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Read the Hugging Face access token from the environment (needed if the model is gated/private)
token = os.getenv("hf_token")

# Load the translation model and tokenizer from Hugging Face
model_name = "robzchhangte/enmz75-helcase"
model = AutoModelForSeq2SeqLM.from_pretrained(model_name, token=token)
tokenizer = AutoTokenizer.from_pretrained(model_name, token=token)
# Translation function; generation is capped at max_length=512 tokens
def translate(text):
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
    outputs = model.generate(**inputs, max_length=512)
    translated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return translated_text
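
# Quick local sanity check of translate() (not part of the original app; assumes the
# model weights have downloaded successfully — uncomment to try):
# print(translate("Hello, how are you?"))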
# Gradio Interface
interface = gr.Interface(
    fn=translate,
    inputs="text",
    outputs="text",
    title="English to Mizo Translator",
    examples=[["Hello, how are you?"], ["What is your name?"]],
)
# Launch the Gradio app locally
interface.launch(share=False)  # Set share=True to get a public shareable link
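
# Optional: once the app is running, it can also be queried programmatically with
# gradio_client. This is a sketch, not part of the original app; the local URL and
# the api_name "/predict" (the default endpoint for a plain gr.Interface) are assumptions.
# from gradio_client import Client
# client = Client("http://127.0.0.1:7860")
# print(client.predict("Hello, how are you?", api_name="/predict"))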