hikinegi committed on
Commit
ea4c985
•
1 Parent(s): 8b63739

Create app.py

Files changed (1):
  app.py +43 -0
app.py ADDED
@@ -0,0 +1,43 @@
import gradio as gr
import random
import time
from transformers import pipeline, AutoModelForSeq2SeqLM, AutoTokenizer

# Load FLAN-T5 and its tokenizer for answering questions over the uploaded context
model = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-base")
tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-base")

# Text of the uploaded file, shared between the upload handler and the chat handler
context = ""

def generate_answer(question):
    # Combine the question, an instruction, and the uploaded context into one prompt
    prompt = (
        question
        + ". \nAnswer this question given the context in the next line if the answer is present in the context; otherwise say I don't know about that. Context: \n "
        + context
    )
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs)
    # Decode the first (and only) generated sequence to a plain string
    return tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]

def upload_file(file):
    # Read the uploaded file as plain text and store it as the shared context
    # (note: PDF files are read as raw bytes/text, not parsed)
    global context
    with open(file.name, encoding="utf-8") as f:
        context = f.read()
    return file.name  # display the uploaded file in the File component

with gr.Blocks() as demo:
    file_output = gr.File()
    upload_button = gr.UploadButton("Click to Upload a File", file_types=["txt", "pdf"])
    upload_button.upload(upload_file, upload_button, file_output)
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot, upload_button])

    def respond(message, chat_history):
        # Generate an answer and append the exchange to the chat history
        ans = generate_answer(message)
        chat_history.append((message, f"\n {ans} "))
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])

    # Voting buttons (no callbacks attached)
    with gr.Row(visible=True) as button_row:
        upvote_btn = gr.Button(value="👍 Upvote", interactive=True)
        downvote_btn = gr.Button(value="👎 Downvote", interactive=True)

demo.queue()
demo.launch(debug=True)