import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
import torch
from ui import title, description, examples  # local module; currently unused below
from langs import LANGS  # local module; currently unused below

# Load the NLLB-200 distilled 600M checkpoint used for Somali -> English translation.
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/nllb-200-distilled-600M")
tokenizer = AutoTokenizer.from_pretrained("facebook/nllb-200-distilled-600M")

# Use the first GPU if available, otherwise fall back to CPU.
device = 0 if torch.cuda.is_available() else -1
src_lang = "som_Latn"
tgt_lang = "eng_Latn"


def translate(text):
    """Translate Somali text to English with the NLLB-200 translation pipeline."""
    # The pipeline is rebuilt on every call; the model and tokenizer are reused.
    translation_pipeline = pipeline(
        "translation",
        model=model,
        tokenizer=tokenizer,
        src_lang=src_lang,
        tgt_lang=tgt_lang,
        device=device,
    )
    result = translation_pipeline(text)
    return result[0]["translation_text"]


# Simple Gradio UI: one text input, one text output.
gr.Interface(
    translate,
    [gr.components.Textbox(label="Text")],
    ["text"],
).launch()
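# Note: `title`, `description`, `examples` (from ui.py) and `LANGS` (from langs.py)
# are imported above but never used. A minimal sketch of how they could be wired
# into the interface, assuming ui.py provides strings and an examples list in the
# format gr.Interface expects (an assumption, not part of the original script):
#
#     gr.Interface(
#         translate,
#         [gr.components.Textbox(label="Text")],
#         ["text"],
#         title=title,
#         description=description,
#         examples=examples,
#     ).launch()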