Update app.py
app.py
CHANGED
@@ -145,12 +145,14 @@ def generate(prompt, history):
 """
 
         response = ""
+        history2 = []
 
         while(len(response) < 1):
             output = llm(context, max_tokens=400, stop=["Nurse:"], echo=False)
             response = output["choices"][0]["text"]
             response = response.strip()
-
+        history2.append(("generation", response))
+        yield history
 
 
         # for output in llm(input, stream=True, max_tokens=100, ):
@@ -159,12 +161,11 @@ def generate(prompt, history):
         # chatbot[-1] = (chatbot[-1][0], response)
 
         # yield response
-
+
+        history.append((prompt, response))
         context += response
         print (context)
-
-        history.append(response)
-        yield response
+        yield history
 
     else:
         output = "Did you forget to enter your Details? Please go to the User Info Tab and Input your data. "
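
For reference, the new yield shape is the one a Gradio Chatbot callback expects: the handler yields the full list of (user, bot) turns instead of the bare response string. Below is a minimal, self-contained sketch of that pattern, assuming llama-cpp-python and a Gradio Blocks UI. The Nurse/Patient prompt template, the model path, and the UI wiring are illustrative assumptions; only the llm(context, max_tokens=400, stop=["Nurse:"], echo=False) call and the yield-history shape come from the diff.

# Minimal sketch (not the repository's actual app.py): a Gradio chat callback
# that builds a running context, calls a llama-cpp model, and yields the
# updated history so gr.Chatbot rerenders the conversation.
import gradio as gr
from llama_cpp import Llama

llm = Llama(model_path="model.gguf")  # hypothetical model path

def generate(prompt, history):
    # Assumed prompt template: the user speaks as the Nurse, the model
    # replies as the Patient, and generation stops when the model starts
    # a new "Nurse:" turn (matching the stop=["Nurse:"] in the diff).
    context = ""
    for user_turn, bot_turn in history:
        context += f"Nurse: {user_turn}\nPatient: {bot_turn}\n"
    context += f"Nurse: {prompt}\nPatient:"

    # Retry until a non-empty completion comes back, as in the diff's
    # while(len(response) < 1) loop.
    response = ""
    while len(response) < 1:
        output = llm(context, max_tokens=400, stop=["Nurse:"], echo=False)
        response = output["choices"][0]["text"].strip()

    # Yield the whole history (a list of (user, bot) tuples), not the raw
    # string, so the Chatbot component can display the new turn.
    history.append((prompt, response))
    yield history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Nurse")
    msg.submit(generate, inputs=[msg, chatbot], outputs=chatbot)

demo.launch()

Yielding the accumulated history (rather than the bare string, as the old code did) is what lets the Chatbot keep every prior turn visible after each submit.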