vishwask committed on
Commit
a17bd15
1 Parent(s): ff78698

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -0
app.py CHANGED
@@ -42,6 +42,7 @@ from langchain.vectorstores.utils import filter_complex_metadata
42
  import fitz
43
  from PIL import Image
44
  from langchain.vectorstores import FAISS
 
45
 
46
  user_session_id = uuid.uuid4()
47
 
@@ -136,6 +137,7 @@ def load_model(_docs):
136
  template = generate_prompt("""{context} Question: {question} """,system_prompt=SYSTEM_PROMPT,) #Enter memory here!
137
 
138
  prompt = PromptTemplate(template=template, input_variables=["context", "question"]) #Add history here
 
139
 
140
  qa_chain = RetrievalQA.from_chain_type(
141
  llm=llm,
@@ -144,6 +146,7 @@ def load_model(_docs):
144
  return_source_documents=True,
145
  chain_type_kwargs={"prompt": prompt,
146
  "verbose": False,
 
147
  #"memory": ConversationBufferMemory(
148
  #memory_key="history",
149
  #input_key="question",
 
42
  import fitz
43
  from PIL import Image
44
  from langchain.vectorstores import FAISS
45
+ import transformers
46
 
47
  user_session_id = uuid.uuid4()
48
 
 
137
  template = generate_prompt("""{context} Question: {question} """,system_prompt=SYSTEM_PROMPT,) #Enter memory here!
138
 
139
  prompt = PromptTemplate(template=template, input_variables=["context", "question"]) #Add history here
140
+ streamer = transformers.TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
141
 
142
  qa_chain = RetrievalQA.from_chain_type(
143
  llm=llm,
 
146
  return_source_documents=True,
147
  chain_type_kwargs={"prompt": prompt,
148
  "verbose": False,
149
+ streamer=streamer,
150
  #"memory": ConversationBufferMemory(
151
  #memory_key="history",
152
  #input_key="question",