jamescg committed on
Commit
5d3e016
1 Parent(s): 8674acb

Update query_data.py

Browse files
Files changed (1) hide show
  1. query_data.py +0 -13
query_data.py CHANGED
@@ -1,19 +1,9 @@
1
  from langchain.prompts.prompt import PromptTemplate
2
  from langchain.llms import OpenAI
3
  from langchain.chains import ChatVectorDBChain
4
- from langchain.document_loaders import TextLoader
5
- loader = TextLoader('SMR4 publication.txt')
6
- from langchain.indexes import VectorstoreIndexCreator
7
- index = VectorstoreIndexCreator().from_loaders([loader])
8
-
9
- # Load Data
10
- loader = TextLoader("SMR4 publication.txt")
11
- raw_documents = loader.load()
12
-
13
 
14
  _template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
15
  You can assume the question about the SMR4 publication.
16
- index.query(query)
17
 
18
  Chat History:
19
  {chat_history}
@@ -30,8 +20,6 @@ Question: {question}
30
  {context}
31
  =========
32
  Answer in Markdown:"""
33
- index.query(query)
34
-
35
  QA_PROMPT = PromptTemplate(template=template, input_variables=["question", "context"])
36
 
37
 
@@ -44,4 +32,3 @@ def get_chain(vectorstore):
44
  condense_question_prompt=CONDENSE_QUESTION_PROMPT,
45
  )
46
  return qa_chain
47
- index.query(query)
 
1
  from langchain.prompts.prompt import PromptTemplate
2
  from langchain.llms import OpenAI
3
  from langchain.chains import ChatVectorDBChain
 
 
 
 
 
 
 
 
 
4
 
5
  _template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
6
  You can assume the question about the SMR4 publication.
 
7
 
8
  Chat History:
9
  {chat_history}
 
20
  {context}
21
  =========
22
  Answer in Markdown:"""
 
 
23
  QA_PROMPT = PromptTemplate(template=template, input_variables=["question", "context"])
24
 
25
 
 
32
  condense_question_prompt=CONDENSE_QUESTION_PROMPT,
33
  )
34
  return qa_chain