william4416 committed on
Commit
878439c
1 Parent(s): 4d5efea

Create app.py

Files changed (1)
  1. app.py +68 -0
app.py ADDED
@@ -0,0 +1,68 @@
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ import gradio as gr
+ import torch
+ import json
+
+
+ title = "AI ChatBot"
+ description = "A state-of-the-art large-scale pretrained response generation model (DialoGPT)"
+ examples = [["How are you?"]]
+
+
+ tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
+ model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
+
+
+ def predict(input, history=[]):
+     # tokenize the new input sentence
+     new_user_input_ids = tokenizer.encode(
+         input + tokenizer.eos_token, return_tensors="pt"
+     )
+
+     # append the new user input tokens to the chat history
+     bot_input_ids = torch.cat([torch.LongTensor(history), new_user_input_ids], dim=-1)
+
+     # generate a response
+     history = model.generate(
+         bot_input_ids, max_length=4000, pad_token_id=tokenizer.eos_token_id
+     ).tolist()
+
+     # decode the tokens and split the conversation on the end-of-text marker
+     response = tokenizer.decode(history[0]).split("<|endoftext|>")
+     # pair alternating turns into a list of (user, bot) tuples for the chatbot component
+     response = [
+         (response[i], response[i + 1]) for i in range(0, len(response) - 1, 2)
+     ]
+     return response, history
+
+
+ def read_json_file(file_path):  # read a JSON file (simple test helper)
+     with open(file_path, 'r') as file:
+         data = json.load(file)
+     return data
+
+
+ def main():
+     # List of file names
+     file_names = ['fileone.json', 'filesecond.json', 'filethird.json', 'filefourth.json', 'filefifth.json']
+
+     # Read each JSON file and print its content
+     for file_name in file_names:
+         json_data = read_json_file(file_name)
+         print(f"Contents of {file_name}:")
+         print(json_data)
+         print()
+
+
+ if __name__ == "__main__":
+     main()
+
+
+ gr.Interface(
+     fn=predict,
+     title=title,
+     description=description,
+     examples=examples,
+     inputs=["text", "state"],
+     outputs=["chatbot", "state"],
+     theme="finlaymacklon/boxy_violet",
+ ).launch()
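
For a quick sanity check of predict outside the Gradio UI, a minimal sketch might look like the following. It assumes the tokenizer, model, and predict definitions from app.py are already available in the current session (for example, pasted into a notebook); the prompts are made up for illustration.

    # Illustrative smoke test for predict(); assumes tokenizer, model, and
    # predict from app.py are already defined in this session.
    history = []                                  # empty chat history (token-id list)
    chat, history = predict("Hello, how are you?", history)
    print(chat)                                   # list of (user_text, bot_text) tuples
    chat, history = predict("Tell me a joke.", history)
    print(chat)

Each call appends the new turn to the token-id history returned by the previous call, which is the same state that the Gradio "state" input/output pair carries between requests.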