import lib  # NOTE(review): appears unused and may not be a real module — confirm before removing
import nltk
import torch
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

"""
Created by Riky Ripaldo
"""

# Fetch NLTK's sentence tokenizer data once at startup.
nltk.download('punkt')

# Load tokenizer and model a single time at module level so every chat
# request reuses them instead of reloading weights from disk.
tokenz = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
modelz = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")


def balasan(chats):
    """Generate a DialoGPT reply for one user message.

    Parameters
    ----------
    chats : str
        The user's message text.

    Returns
    -------
    str
        The model's decoded reply, with special tokens removed.
    """
    # Append EOS so the model knows the user's turn has ended.
    input_ids = tokenz.encode(chats + tokenz.eos_token, return_tensors='pt')
    response_ids = modelz.generate(
        input_ids,
        max_length=1000,
        pad_token_id=tokenz.eos_token_id,
    )
    # BUGFIX 1: `generate` echoes the prompt at the start of its output, so
    # decode only the newly generated tokens (everything after the input).
    # BUGFIX 2: the keyword is `skip_special_tokens` (plural); the original
    # misspelled `skip_special_token` was silently ignored, letting
    # <|endoftext|> leak into the reply shown to the user.
    response_teks = tokenz.decode(
        response_ids[0][input_ids.shape[-1]:],
        skip_special_tokens=True,
    )
    return response_teks


# NOTE(review): `gr.inputs` / `gr.outputs` were removed in Gradio 4; if the
# installed Gradio is >= 4, these should become gr.Textbox(...) — confirm
# the pinned version before changing.
gui = gr.Interface(
    fn=balasan,
    inputs=gr.inputs.Textbox(label="Ketik Pesan"),
    outputs=gr.outputs.Textbox(label="Balasan NesiaChan"),
)

# Guard the launch so importing this module (e.g. for testing) does not
# start the web server.
if __name__ == "__main__":
    gui.launch()