pedropauletti committed on
Commit
faf4e56
1 Parent(s): 945537d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -114
app.py CHANGED
@@ -1,123 +1,21 @@
1
  import gradio as gr
2
- import os
3
- import time
4
- from haystack.document_stores import InMemoryDocumentStore
5
- from haystack.nodes import EmbeddingRetriever
6
- import pandas as pd
7
- from deep_translator import GoogleTranslator
8
 
9
 
10
- def load_qa_model():
11
- document_store = InMemoryDocumentStore()
12
- retriever = EmbeddingRetriever(
13
- document_store=document_store,
14
- embedding_model="sentence-transformers/all-MiniLM-L6-v2",
15
- use_gpu=False,
16
- scale_score=False,
17
- )
18
- # Get dataframe with columns "question", "answer" and some custom metadata
19
- df = pd.read_csv('social-faq.csv', on_bad_lines='skip', delimiter=';')
20
- # Minimal cleaning
21
- df.fillna(value="", inplace=True)
22
- df["question"] = df["question"].apply(lambda x: x.strip())
23
-
24
- questions = list(df["question"].values)
25
- df["embedding"] = retriever.embed_queries(queries=questions).tolist()
26
- df = df.rename(columns={"question": "content"})
27
-
28
- # Convert Dataframe to list of dicts and index them in our DocumentStore
29
- docs_to_index = df.to_dict(orient="records")
30
- document_store.write_documents(docs_to_index)
31
-
32
- return retriever
33
-
34
- def add_text(chat_history, text):
35
- chat_history = chat_history + [(text, None)]
36
- return chat_history, gr.Textbox(value="", interactive=False)
37
-
38
-
39
- def add_file(chat_history, file):
40
- chat_history = chat_history + [((file.name,), None)]
41
- return chat_history
42
-
43
-
44
- def chatbot_response(chat_history, language):
45
-
46
- chat_history[-1][1] = ""
47
-
48
- global retriever
49
- global last_answer
50
-
51
- if language == 'pt-br':
52
- response = get_answers(retriever, GoogleTranslator(source='pt', target='en').translate(chat_history[-1][0]))
53
- response = GoogleTranslator(source='en', target='pt').translate(response)
54
  else:
55
- response = get_answers(retriever, chat_history[-1][0])
56
-
57
- last_answer = response
58
-
59
- for character in response:
60
- chat_history[-1][1] += character
61
- time.sleep(0.01)
62
- yield chat_history
63
-
64
-
65
-
66
- def get_answers(retriever, query):
67
- from haystack.pipelines import FAQPipeline
68
-
69
- pipe = FAQPipeline(retriever=retriever)
70
-
71
- from haystack.utils import print_answers
72
-
73
- # Run any question and change top_k to see more or less answers
74
- prediction = pipe.run(query=query, params={"Retriever": {"top_k": 1}})
75
-
76
- answers = prediction['answers']
77
-
78
- if answers:
79
- return answers[0].answer
80
- else:
81
- return "I don't have an answer to that question"
82
-
83
-
84
-
85
-
86
- retriever = load_qa_model()
87
-
88
- last_answer = ""
89
 
90
 
91
  with gr.Blocks() as demo:
92
- with gr.Accordion("Settings", open=False):
93
- language = gr.Radio(["en-us", "pt-br"], label="Language", info="Choose the language to display the classification result and audio", value='en-us', interactive=True)
94
-
95
-
96
- with gr.Tab("Help"):
97
- chatbot = gr.Chatbot(
98
- [],
99
- elem_id="chatbot",
100
- bubble_full_width=False,
101
- # avatar_images=(None, "content/avatar-socialear.png"),
102
- )
103
-
104
- with gr.Row():
105
- txt = gr.Textbox(
106
- scale=4,
107
- show_label=False,
108
- placeholder="Enter text and press enter",
109
- container=False,
110
- )
111
- inputRecord = gr.Audio(label="Record a question", source="microphone", type="filepath")
112
- with gr.Column():
113
- btn = gr.Button(value="Listen the answer")
114
- audioOutput = gr.Audio(interactive=False)
115
 
116
- txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
117
- chatbot_response, [chatbot, language], chatbot
118
- )
119
- txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
120
-
121
 
122
- demo.queue()
123
- demo.launch()
 
1
  import gradio as gr
 
 
 
 
 
 
2
 
3
 
4
+ def change_textbox(choice):
5
+ if choice == "short":
6
+ return gr.Textbox(lines=2, visible=True)
7
+ elif choice == "long":
8
+ return gr.Textbox(lines=8, visible=True, value="Lorem ipsum dolor sit amet")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  else:
10
+ return gr.Textbox(visible=False)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
 
13
  with gr.Blocks() as demo:
14
+ radio = gr.Radio(
15
+ ["short", "long", "none"], label="What kind of essay would you like to write?"
16
+ )
17
+ text = gr.Textbox(lines=2, interactive=True, show_copy_button=True)
18
+ radio.change(fn=change_textbox, inputs=radio, outputs=text)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
19
 
 
 
 
 
 
20
 
21
+ demo.launch()