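# Gradio chat demo: DialoGPT-small tokenizer paired with the fine-tuned 'tomkr000/scottbotai' model.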
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Tokenizer comes from the base DialoGPT-small checkpoint; the model is a fine-tuned variant
tokenizer = AutoTokenizer.from_pretrained('microsoft/DialoGPT-small', padding_side='right')
model = AutoModelForCausalLM.from_pretrained('tomkr000/scottbotai')

def chat(message, history):
    history = history or []

    # Encode the new user message, ending the turn with the end-of-sequence token
    new_user_input_ids = tokenizer.encode(message + tokenizer.eos_token, return_tensors='pt')

    # Rebuild the conversation context from the previous turns kept in Gradio state
    if history:
        past = ''.join(user + tokenizer.eos_token + bot + tokenizer.eos_token for user, bot in history)
        past_ids = tokenizer.encode(past, return_tensors='pt')
        bot_input_ids = torch.cat([past_ids, new_user_input_ids], dim=-1)
    else:
        bot_input_ids = new_user_input_ids

    # Sample a reply; top-k/top-p sampling keeps responses varied while
    # no_repeat_ngram_size suppresses verbatim repetition
    chat_history_ids = model.generate(
        bot_input_ids,
        max_length=200,
        pad_token_id=tokenizer.eos_token_id,
        no_repeat_ngram_size=3,
        do_sample=True,
        top_k=100,
        top_p=0.7,
        temperature=0.8,
    )

    # Decode only the newly generated tokens (everything after the prompt)
    response = tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)

    history.append((message, response))

    # Return history twice: once for the Chatbot display, once to update the state
    return history, history

# Chat display component; color_map sets the user/bot message bubble colors
chatbot = gr.Chatbot().style(color_map=("green", "pink"))

# Wire the chat function to a text box plus session state; replies render in the Chatbot
demo = gr.Interface(
    chat,
    ["text", "state"],
    [chatbot, "state"],
    allow_flagging="never",
)

demo.launch()