Update app.py
app.py CHANGED
@@ -10,7 +10,7 @@ from langchain.chat_models import ChatOpenAI
 from langchain.schema import AIMessage, HumanMessage
 from langchain import PromptTemplate, LLMChain
 from langchain.llms import TextGen
-
+from langchain.cache import InMemoryCache
 
 
 import os
@@ -42,9 +42,12 @@ def predict(message, history):
 
     partial_message = ""
     for chunk in response:
-        if len(chunk[
-
+        if len(chunk[0]) != 0:
+            time.sleep(0.1)
+            partial_message = partial_message + chunk[0]
         yield partial_message
+
+langchain.llm_cache = InMemoryCache()
 
 gr.ChatInterface(predict,
                  textbox=gr.Textbox(placeholder="请输入您的问题", container=False, scale=7),
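
For context, here is a minimal runnable sketch (not part of the commit) of the streaming pattern this patch introduces. The actual LLM call is replaced by a hypothetical stand-in that splits the incoming message into word-sized chunks shaped like (text,), so chunk[0] behaves the same way as in the patched loop and the yield-per-chunk streaming can be tried in isolation.

import time

import gradio as gr


def predict(message, history):
    # Hypothetical stand-in for the real TextGen/LangChain response stream:
    # split the message into word-sized "chunks" shaped like (text,) so
    # chunk[0] works the same way as in the patched loop above.
    response = [(word + " ",) for word in message.split()]

    partial_message = ""
    for chunk in response:
        if len(chunk[0]) != 0:
            time.sleep(0.1)  # small delay so the streamed updates are visible
            partial_message = partial_message + chunk[0]
        yield partial_message  # Gradio re-renders the reply on every yield


gr.ChatInterface(predict,
                 textbox=gr.Textbox(placeholder="请输入您的问题", container=False, scale=7),
                 ).launch()

As for the other change, langchain.llm_cache = InMemoryCache() enables LangChain's global in-memory prompt cache, so repeated identical prompts can be answered without re-querying the TextGen backend. The patch appears to rely on time and the top-level langchain module being imported elsewhere in app.py, since those imports are not visible in these hunks.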