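"""Gradio chat demo for microsoft/DialoGPT-small, keeping the conversation history in the interface state."""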
import gradio as gr
import torch

from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the pretrained DialoGPT-small tokenizer and model from the Hugging Face Hub.
tokenizer = GPT2Tokenizer.from_pretrained('microsoft/DialoGPT-small')
model = GPT2LMHeadModel.from_pretrained('microsoft/DialoGPT-small')

def chat(message, history_ids):
    # Encode the new user message, terminated with the end-of-sequence token.
    new_input_ids = tokenizer.encode(message + tokenizer.eos_token, return_tensors='pt')
    # Append the new message to the running conversation history, if there is one.
    bot_input_ids = torch.cat([history_ids, new_input_ids], dim=-1) if history_ids is not None else new_input_ids
    # Generate a reply; DialoGPT has no pad token, so reuse the EOS token for padding.
    history_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
    # Decode only the newly generated tokens (everything after the prompt).
    response = tokenizer.decode(history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
    return response, history_ids

# Text boxes for the user message and the bot's reply.
user_box = gr.inputs.Textbox(lines=2, label='User:')
bot_box = gr.outputs.Textbox(label='Bot:')

# The hidden "state" input/output carries the conversation token ids between turns.
gr.Interface(fn=chat,
    title="DialoGPT-small",
    inputs=[user_box, "state"],
    outputs=[bot_box, "state"],
    allow_screenshot=False,
    allow_flagging='never').launch()