Spaces:
Paused
Paused
Carlosito16
committed on
Commit
•
87a53f8
1
Parent(s):
eb08e6a
Update pages/3_chat.py
Browse files- pages/3_chat.py +12 -3
pages/3_chat.py
CHANGED
@@ -7,6 +7,7 @@ import gspread
|
|
7 |
import torch
|
8 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
9 |
|
|
|
10 |
|
11 |
# from langchain.vectorstores import Chroma
|
12 |
from langchain.vectorstores import FAISS
|
@@ -59,7 +60,8 @@ def load_conversational_qa_memory_retriever():
|
|
59 |
return conversational_qa_memory_retriever, question_generator
|
60 |
|
61 |
def new_retrieve_answer():
|
62 |
-
|
|
|
63 |
answer = conversational_qa_memory_retriever({"question": prompt_answer })
|
64 |
|
65 |
print(f"condensed quesion : {question_generator.run({'chat_history': answer['chat_history'], 'question' : prompt_answer})}")
|
@@ -71,12 +73,19 @@ def new_retrieve_answer():
|
|
71 |
|
72 |
st.session_state.my_text_input = ""
|
73 |
|
74 |
-
return answer['answer']
|
75 |
|
76 |
def clean_chat_history():
    """Wipe the conversation state so a fresh chat starts from scratch.

    Resets the Streamlit-side transcript and also clears the retriever
    chain's internal memory, so stale turns don't leak into new questions.
    """
    st.session_state.chat_history = []
    # The chain keeps its own copy of the dialogue; clear it as well.
    conversational_qa_memory_retriever.memory.chat_memory.clear()
|
79 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
80 |
|
81 |
if "history" not in st.session_state: #this one is for the google sheet logging
|
82 |
st.session_state.history = []
|
@@ -89,7 +98,7 @@ if "chat_history" not in st.session_state: #this one is to pass previous message
|
|
89 |
llm_model = st.session_state['model']
|
90 |
vector_database = st.session_state['faiss_db']
|
91 |
conversational_qa_memory_retriever, question_generator = load_conversational_qa_memory_retriever()
|
92 |
-
|
93 |
|
94 |
|
95 |
print("all load done")
|
|
|
7 |
import torch
|
8 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
9 |
|
10 |
+
from googletrans import Translator
|
11 |
|
12 |
# from langchain.vectorstores import Chroma
|
13 |
from langchain.vectorstores import FAISS
|
|
|
60 |
return conversational_qa_memory_retriever, question_generator
|
61 |
|
62 |
def new_retrieve_answer():
|
63 |
+
translated_to_eng = thai_to_eng(st.session_state.my_text_input).text
|
64 |
+
prompt_answer= translated_to_eng + ". Try to be elaborate and informative in your answer."
|
65 |
answer = conversational_qa_memory_retriever({"question": prompt_answer })
|
66 |
|
67 |
print(f"condensed quesion : {question_generator.run({'chat_history': answer['chat_history'], 'question' : prompt_answer})}")
|
|
|
73 |
|
74 |
st.session_state.my_text_input = ""
|
75 |
|
76 |
+
return eng_to_thai(answer['answer']).text #this positional slicing helps remove "<pad> " at the beginning
|
77 |
|
78 |
def clean_chat_history():
    """Reset the chat UI transcript and the QA chain's conversation memory."""
    # Drop everything shown in the Streamlit chat window...
    st.session_state.chat_history = []
    # ...and purge the memory the conversational retriever accumulated.
    conversational_qa_memory_retriever.memory.chat_memory.clear()
|
81 |
|
82 |
+
def thai_to_eng(text):
    """Translate *text* from Thai to English.

    Returns the googletrans result object (callers read ``.text`` on it),
    using the module-level ``translator`` instance.
    """
    return translator.translate(text, src='th', dest='en')
|
85 |
+
|
86 |
+
def eng_to_thai(text):
    """Translate *text* from English to Thai.

    Returns the googletrans result object (callers read ``.text`` on it),
    using the module-level ``translator`` instance.
    """
    return translator.translate(text, src='en', dest='th')
|
89 |
|
90 |
if "history" not in st.session_state: #this one is for the google sheet logging
|
91 |
st.session_state.history = []
|
|
|
98 |
llm_model = st.session_state['model']
|
99 |
vector_database = st.session_state['faiss_db']
|
100 |
conversational_qa_memory_retriever, question_generator = load_conversational_qa_memory_retriever()
|
101 |
+
translator = Translator()
|
102 |
|
103 |
|
104 |
print("all load done")
|