Daniel Marques committed on
Commit
b9ea22a
1 Parent(s): 2948780

feat: add redis

Browse files
Files changed (1) hide show
  1. prompt_template_utils.py +92 -0
prompt_template_utils.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This file implements prompt template for llama based models.
3
+ Modify the prompt template based on the model you select.
4
+ This seems to have significant impact on the output of the LLM.
5
+ """
6
+
7
+ from langchain.memory import ConversationBufferMemory
8
+ from langchain.prompts import PromptTemplate
9
+
10
# This default system prompt is specific to Llama-2-style chat models; adjust
# it when targeting a different model family.

# Earlier candidate system prompts, kept for reference — uncomment one to swap
# it in if the active prompt underperforms for your model:
# system_prompt = """You are a helpful assistant, you will use the context and documents provided in the training to answer users questions.
# Read the context provided before answering questions and think step by step. If you can't answer a user's question based on the
# context provided, inform the user. Don't use any other information to answer the user."""

# system_prompt = """You are a helpful assistant, and you will use the context and documents provided in the training to answer users' questions. Please read the context provided carefully before responding to questions and follow a step-by-step thought process. If you cannot answer a user's question based on the provided context, please inform the user. Do not use any other information to answer the user. Provide a detailed response based on the content of locally trained documents."""

# NOTE(review): "It's a useful assistant" reads like a mistranslation of
# "You are a helpful assistant" — confirm the wording is intentional before
# changing it, since the exact prompt text affects LLM output.
system_prompt = """It's a useful assistant who will use the context and documents provided in the training to answer users' questions.
Read the context provided before answering the questions and think step by step. If you can't answer, just say "I don't know" and don't try to put together an answer to respond to the user."""
20
+
21
def get_prompt_template(system_prompt=system_prompt, promptTemplate_type=None, history=False):
    """Build the prompt template and conversation memory for the selected model family.

    The original implementation repeated six near-identical template
    constructions (three model families x with/without history). The
    conversation body depends only on ``history`` and the wrapper tokens only
    on ``promptTemplate_type``, so each is computed exactly once here; every
    resulting template string is byte-identical to the original's.

    Args:
        system_prompt: System instructions prepended to the template. Defaults
            to the module-level ``system_prompt`` constant.
        promptTemplate_type: ``"llama"``, ``"mistral"``, or any other value
            (including ``None``) for the plain fallback template. (Parameter
            name kept camelCase for backward compatibility with callers.)
        history: When True, the template gains a ``{history}`` slot so the
            returned memory's content is injected into the prompt.

    Returns:
        tuple: ``(prompt, memory)`` where ``prompt`` is a
        ``langchain.prompts.PromptTemplate`` and ``memory`` is a
        ``ConversationBufferMemory``. The memory object is created
        unconditionally — even when ``history`` is False — matching the
        original behavior.
    """
    # Conversation body and the template's input variables depend only on
    # whether chat history is threaded into the prompt. The leading newline
    # and 12-space indentation inside these literals are part of the prompt
    # text and must not be reformatted.
    if history:
        input_variables = ["history", "context", "question"]
        body = """
            Context: {history} \n {context}
            User: {question}"""
    else:
        input_variables = ["context", "question"]
        body = """
            Context: {context}
            User: {question}"""

    # Wrap the body in the control tokens expected by the chosen model family.
    if promptTemplate_type == "llama":
        # Llama-2 chat format: [INST] <<SYS>> ... <</SYS>> ... [/INST]
        B_INST, E_INST = "[INST]", "[/INST]"
        B_SYS, E_SYS = "<<SYS>>\n", "\n<</SYS>>\n\n"
        prompt_template = B_INST + B_SYS + system_prompt + E_SYS + body + E_INST
    elif promptTemplate_type == "mistral":
        # Mistral instruct format; the extra "\n" reproduces the blank line
        # the original placed between the system prompt and the body.
        B_INST, E_INST = "<s>[INST] ", " [/INST]"
        prompt_template = B_INST + system_prompt + "\n" + body + E_INST
    else:
        # Plain fallback for other models: no special tokens, explicit
        # "Answer:" cue at the end. Change this for the model you selected.
        prompt_template = system_prompt + "\n" + body + "\n            Answer:"

    prompt = PromptTemplate(input_variables=input_variables, template=prompt_template)

    # Memory is always constructed (original behavior) so callers can rely on
    # receiving one regardless of the ``history`` flag.
    memory = ConversationBufferMemory(input_key="question", memory_key="history")

    return (
        prompt,
        memory,
    )