import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM

# Hindi -> English translation model
tokenizer1 = AutoTokenizer.from_pretrained("salesken/translation-hi-en")
model1 = AutoModelForSeq2SeqLM.from_pretrained("salesken/translation-hi-en")

# Earlier code-generation experiment, kept for reference:
# tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-16B-mono")
# model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen-16B-mono")


def greet(name):
    # Translate the Hindi snippet entered in the UI into English.
    hin_snippet = name
    inputs = tokenizer1.encode(
        hin_snippet, return_tensors="pt", padding=True, max_length=512, truncation=True
    )
    outputs = model1.generate(inputs, max_length=128, num_beams=None, early_stopping=True)
    # Decode, drop special tokens (e.g. <pad>), and normalise to lowercase.
    translated = tokenizer1.decode(outputs[0], skip_special_tokens=True).strip().lower()

    # Code-generation variant, kept for reference:
    # input_ids = tokenizer("# " + name, return_tensors="pt").input_ids
    # sample1 = model.generate(input_ids, max_length=128)
    # return tokenizer.decode(sample1[0], skip_special_tokens=True)
    return translated


iface = gr.Interface(fn=greet, inputs="text", outputs="text")
iface.launch()
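
# A minimal sketch of how one might exercise greet() directly, without the UI
# (hypothetical input; the exact wording of the output depends on the model weights):
# print(greet("यह एक परीक्षण वाक्य है"))
# -> a lowercased English sentence, e.g. "this is a test sentence"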