guangliang.yin committed on
Commit
944d0e1
1 Parent(s): 3de91d2

Optimize code logic; prepare for custom prompts

app.py CHANGED
@@ -12,7 +12,11 @@ from project.prompt.answer_by_private_prompt import (
     COMBINE_PROMPT,
     EXAMPLE_PROMPT,
     QUESTION_PROMPT,
+    DEFAULT_TEXT_QA_PROMPT,
+    DEFAULT_REFINE_PROMPT,
 )
+from langchain.chains.combine_documents.refine import RefineDocumentsChain
+from langchain.chains.llm import LLMChain
 
 chain: Optional[Callable] = None
 
@@ -54,13 +58,33 @@ def web_loader(file, openai_key, puzhiai_key, zilliz_uri, user, password):
     # retriever=docsearch.as_retriever(),
     #)
 
-    chain = RetrievalQAWithSourcesChain.from_llm(
-        ZhipuAILLM(model="glm-3-turbo", temperature=0.1, zhipuai_api_key=puzhiai_key),
-        EXAMPLE_PROMPT,
-        QUESTION_PROMPT,
-        COMBINE_PROMPT,
-        retriever=docsearch.as_retriever(),
+    #chain = RetrievalQAWithSourcesChain.from_llm(
+    #    ZhipuAILLM(model="glm-3-turbo", temperature=0.1, zhipuai_api_key=puzhiai_key),
+    #    EXAMPLE_PROMPT,
+    #    QUESTION_PROMPT,
+    #    COMBINE_PROMPT,
+    #    retriever=docsearch.as_retriever(),
+    #)
+
+    llm = ZhipuAILLM(model="glm-3-turbo", temperature=0.1, zhipuai_api_key=puzhiai_key)
+
+    question_prompt = DEFAULT_TEXT_QA_PROMPT
+    refine_prompt = DEFAULT_REFINE_PROMPT
+    document_prompt = EXAMPLE_PROMPT
+    verbose: Optional[bool] = None
+    initial_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)
+    _refine_llm = llm
+    refine_chain = LLMChain(llm=_refine_llm, prompt=refine_prompt, verbose=verbose)
+    combine_documents_chain = RefineDocumentsChain(
+        initial_llm_chain=initial_chain,
+        refine_llm_chain=refine_chain,
+        document_variable_name="context_str",
+        initial_response_name="existing_answer",
+        document_prompt=document_prompt,
+        verbose=verbose,
     )
+
+    chain = RetrievalQAWithSourcesChain(combine_documents_chain=combine_documents_chain, retriever=docsearch.as_retriever())
     return "success to load data"
 
 
@@ -81,7 +105,7 @@ if __name__ == "__main__":
     """
     <h1><center>Langchain And Zilliz App</center></h1>
 
-    v.2.27.13.58
+    v.2.27.14.31
 
     """
     )
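Note: the hand-built pipeline above replaces `RetrievalQAWithSourcesChain.from_llm(...)` with an explicit `RefineDocumentsChain`: the first retrieved document is answered via `DEFAULT_TEXT_QA_PROMPT`, each further document is folded in via `DEFAULT_REFINE_PROMPT`, and `EXAMPLE_PROMPT` controls how each document is rendered into `{context_str}`. A minimal sketch of querying the assembled chain, assuming the legacy dict-call interface of langchain chains (the question string is invented for illustration):

```python
# Hypothetical usage of the chain built in web_loader (legacy langchain API).
# "answer" and "sources" are the standard output keys of
# RetrievalQAWithSourcesChain.
result = chain({"question": "What does the loaded page say about Milvus?"})
print(result["answer"])   # refined answer, updated once per retrieved document
print(result["sources"])  # sources the refine prompt is asked to keep current
```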
project/prompt/answer_by_private_prompt.py CHANGED
@@ -25,4 +25,35 @@ COMBINE_PROMPT = PromptTemplate(
 EXAMPLE_PROMPT = PromptTemplate(
     template="Content: {page_content}\nSource: {source}",
     input_variables=["page_content", "source"],
 )
+
+DEFAULT_REFINE_PROMPT_TMPL = (
+    "The original question is as follows: {question}\n"
+    "We have provided an existing answer, including sources: {existing_answer}\n"
+    "We have the opportunity to refine the existing answer "
+    "(only if needed) with some more context below.\n"
+    "------------\n"
+    "{context_str}\n"
+    "------------\n"
+    "Given the new context, refine the original answer to better "
+    "answer the question. "
+    "If you do update it, please update the sources as well. "
+    "If the context isn't useful, return the original answer."
+)
+DEFAULT_REFINE_PROMPT = PromptTemplate(
+    input_variables=["question", "existing_answer", "context_str"],
+    template=DEFAULT_REFINE_PROMPT_TMPL,
+)
+
+
+DEFAULT_TEXT_QA_PROMPT_TMPL = (
+    "Context information is below. \n"
+    "---------------------\n"
+    "{context_str}"
+    "\n---------------------\n"
+    "Given the context information and not prior knowledge, "
+    "answer the question: {question}\n"
+)
+DEFAULT_TEXT_QA_PROMPT = PromptTemplate(
+    input_variables=["context_str", "question"], template=DEFAULT_TEXT_QA_PROMPT_TMPL
+)
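For reference, this is roughly what the two templates expand to at run time; the document contents, question, and intermediate answer below are invented for illustration, and `{context_str}` holds a document already rendered through `EXAMPLE_PROMPT`:

```python
# Illustrative only: fabricated documents and question showing what each
# template receives during the refine loop.
first_call = DEFAULT_TEXT_QA_PROMPT.format(
    context_str="Content: Milvus is a vector database.\nSource: https://example.com/a",
    question="What is Milvus?",
)
# The model's answer to first_call becomes existing_answer for every
# subsequent retrieved document:
refine_call = DEFAULT_REFINE_PROMPT.format(
    question="What is Milvus?",
    existing_answer="Milvus is a vector database.\nSOURCES: https://example.com/a",
    context_str="Content: Milvus is developed by Zilliz.\nSource: https://example.com/b",
)
```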