uyen13 committed
Commit c45a9fd • 1 Parent(s): 967830d

Update app.py

Files changed (1)
  1. app.py +70 -39
app.py CHANGED
@@ -9,7 +9,7 @@ from langchain.chains import ConversationalRetrievalChain
 from ctransformers import AutoModelForCausalLM
 from langchain_g4f import G4FLLM
 from g4f import Provider, models
-import spacy
+# import spacy
 import requests
 # Define the path for generated embeddings
 DB_FAISS_PATH = 'vectorstore/db_faiss'
@@ -88,52 +88,83 @@ nlp = spacy.load("ja_core_news_sm")
 
 # Function for conversational chat
 def conversational_chat(query):
-    result = None
-    similarity_score = 0
-    # Set a threshold for similarity (you can adjust this)
-    similarity_threshold = 0.8
-    while similarity_score <= similarity_threshold:
-        result = chain({"question": query, "chat_history": st.session_state['history']})
-        doc = nlp(result["answer"])
+    # result = None
+    # similarity_score = 0
+    # # Set a threshold for similarity (you can adjust this)
+    # similarity_threshold = 0.8
+    # while similarity_score <= similarity_threshold:
+    #     result = chain({"question": query, "chat_history": st.session_state['history']})
+    #     doc = nlp(result["answer"])
 
-        # Define the Japanese phrase to match
-        target_phrase = query  # Replace with your desired Japanese phrase
+    #     # Define the Japanese phrase to match
+    #     target_phrase = query  # Replace with your desired Japanese phrase
 
-        # Check for similarity
-        similarity_score = doc.similarity(nlp(target_phrase))
-
+    #     # Check for similarity
+    #     similarity_score = doc.similarity(nlp(target_phrase))
+    result = chain({"question": query, "chat_history": st.session_state['history']})
     st.session_state['history'].append((query, result["answer"]))
     return result["answer"]
-
-# Initialize chat history
-if 'history' not in st.session_state:
-    st.session_state['history'] = []
-
-# Initialize messages
-if 'generated' not in st.session_state:
-    st.session_state['generated'] = ["こんにちは!zendo美女です。何かお探しですか？... 🤗"]
-
+
+# Initialize session state if not already done
 if 'past' not in st.session_state:
-    st.session_state['past'] = ["チャットはここから"]
+    st.session_state['past'] = []
+    st.session_state['generated'] = []
 
-# Create containers for chat history and user input
-response_container = st.container()
-container = st.container()
+# Create a container for the chat history
+chat_history_container = st.container()
 
-# User input form
-with container:
-    with st.form(key='my_form', clear_on_submit=True):
-        user_input = st.text_input("ChatBox", placeholder="質問をご記入ください... ", key='input')
-        submit_button = st.form_submit_button(label='Send')
+# Create a form for user input
+with st.form(key='my_form', clear_on_submit=True):
+    user_input = st.text_input("ChatBox", placeholder="Ask anything...", key='input')
+    submit_button = st.form_submit_button(label='Send')
 
-    if submit_button and user_input:
-        output = conversational_chat(user_input)
-        st.session_state['past'].append(user_input)
-        st.session_state['generated'].append(output)
+# Process user input and update chat history
+if submit_button and user_input:
+    output = conversational_chat(user_input)
+    st.session_state['past'].append(user_input)
+    st.session_state['generated'].append(output)
 
-# Display chat history
+# Display chat history within the container
 if st.session_state['generated']:
-    with response_container:
+    with chat_history_container:
         for i in range(len(st.session_state['generated'])):
-            message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="big-smile")
-            message(st.session_state["generated"][i], key=str(i), avatar_style="thumbs")
+            st.text(st.session_state["past"][i], key=str(i) + '_user')
+            st.text(st.session_state["generated"][i], key=str(i))
+
+# Ensure chat history is always scrollable
+st.markdown("""<style>
+.stText { overflow-y: auto; }
+</style>""", unsafe_allow_html=True)
+
+# Initialize chat history
+# if 'history' not in st.session_state:
+#     st.session_state['history'] = []
+
+# # Initialize messages
+# if 'generated' not in st.session_state:
+#     st.session_state['generated'] = ["こんにちは!zendo美女です。何かお探しですか？... 🤗"]
+
+# if 'past' not in st.session_state:
+#     st.session_state['past'] = ["チャットはここから"]
+
+# # Create containers for chat history and user input
+# response_container = st.container()
+# container = st.container()
+
+# # User input form
+# with container:
+#     with st.form(key='my_form', clear_on_submit=True):
+#         user_input = st.text_input("ChatBox", placeholder="質問をご記入ください... ", key='input')
+#         submit_button = st.form_submit_button(label='Send')
+
+# if submit_button and user_input:
+#     output = conversational_chat(user_input)
+#     st.session_state['past'].append(user_input)
+#     st.session_state['generated'].append(output)
+
+# # Display chat history
+# if st.session_state['generated']:
+#     with response_container:
+#         for i in range(len(st.session_state['generated'])):
+#             message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="big-smile")
+#             message(st.session_state["generated"][i], key=str(i), avatar_style="thumbs")
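The block this commit comments out re-queried the chain until spaCy scored the answer as sufficiently similar to the question, with no cap on retries; note also that the small ja_core_news_sm pipeline ships without static word vectors, so Doc.similarity is of limited use with it. For reference, here is a minimal sketch of the same idea with a bounded retry loop. It assumes the app's existing chain object and a vector-equipped spaCy model such as ja_core_news_md; the function name and model choice are illustrative, not part of this commit.

import spacy
import streamlit as st

# Assumption: a spaCy model that includes static word vectors is installed.
nlp = spacy.load("ja_core_news_md")

def conversational_chat_with_gate(query, chain, similarity_threshold=0.8, max_tries=3):
    """Re-ask the chain until the answer looks similar enough to the query,
    but stop after max_tries instead of looping indefinitely."""
    result = None
    for _ in range(max_tries):
        result = chain({"question": query, "chat_history": st.session_state['history']})
        score = nlp(result["answer"]).similarity(nlp(query))
        if score > similarity_threshold:
            break
    st.session_state['history'].append((query, result["answer"]))
    return result["answer"]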
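After the change, conversational_chat makes a single chain call and the UI keeps all state in st.session_state. The sketch below shows that flow end to end as a self-contained Streamlit script; the retrieval chain is replaced by a stub (fake_chain) so it runs without the FAISS store or G4F model, and every name in it is illustrative rather than taken from the repository. Plain st.text calls are used without a key, since simple text output does not need one.

import streamlit as st

def fake_chain(inputs):
    # Stand-in for the ConversationalRetrievalChain: just echo the question.
    return {"answer": f"You asked: {inputs['question']}"}

# Initialize session state on first run
if 'history' not in st.session_state:
    st.session_state['history'] = []
    st.session_state['past'] = []
    st.session_state['generated'] = []

# Container for the chat history, declared before the form so replies render above it
chat_history_container = st.container()

# Input form that clears itself after each submission
with st.form(key='my_form', clear_on_submit=True):
    user_input = st.text_input("ChatBox", placeholder="Ask anything...", key='input')
    submit_button = st.form_submit_button(label='Send')

# Run the (stubbed) chain and record the turn
if submit_button and user_input:
    result = fake_chain({"question": user_input,
                         "chat_history": st.session_state['history']})
    st.session_state['history'].append((user_input, result["answer"]))
    st.session_state['past'].append(user_input)
    st.session_state['generated'].append(result["answer"])

# Replay the conversation inside the container on every rerun
with chat_history_container:
    for question, answer in zip(st.session_state['past'], st.session_state['generated']):
        st.text(question)
        st.text(answer)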