Pclanglais committed on
Commit
0abf936
1 Parent(s): 765817d

Update prompt_demo_rag.py

Browse files
Files changed (1) hide show
  1. prompt_demo_rag.py +3 -5
prompt_demo_rag.py CHANGED
@@ -19,7 +19,7 @@ sys.path.append(".")
19
  os.chdir(os.path.dirname(os.path.abspath(__file__)))
20
 
21
  def get_llm_response(prompt_template):
22
- sampling_params = SamplingParams(temperature=0.4, top_p=.95, max_tokens=2000, presence_penalty = 2)
23
  prompts = [prompt_template]
24
  outputs = llm.generate(prompts, sampling_params, use_tqdm = False)
25
  generated_text = outputs[0].outputs[0].text
@@ -28,6 +28,8 @@ def get_llm_response(prompt_template):
28
 
29
  #Typical example:
30
 
 
 
31
 
32
  if __name__ == "__main__":
33
 
@@ -96,10 +98,6 @@ if __name__ == "__main__":
96
  rendered_template = template.render(**data)
97
  print(rendered_template)
98
  print("---")
99
-
100
- llm = LLM("mistral-mfs-reference-2/mistral-mfs-reference-2")
101
-
102
- sampling_params = SamplingParams(temperature=0.7, top_p=0.95, max_tokens=1500)
103
 
104
  prompt, generated_text = get_llm_response(rendered_template)
105
  print("Albert : ", generated_text)
 
19
  os.chdir(os.path.dirname(os.path.abspath(__file__)))
20
 
21
  def get_llm_response(prompt_template):
22
+ sampling_params = SamplingParams(temperature=.7, top_p=.95, max_tokens=2000, presence_penalty = 1.5, stop = ["``"]) #Officially recommended parameters
23
  prompts = [prompt_template]
24
  outputs = llm.generate(prompts, sampling_params, use_tqdm = False)
25
  generated_text = outputs[0].outputs[0].text
 
28
 
29
  #Typical example:
30
 
31
+ llm = LLM("AgentPublic/Guillaume-Tell")
32
+
33
 
34
  if __name__ == "__main__":
35
 
 
98
  rendered_template = template.render(**data)
99
  print(rendered_template)
100
  print("---")
 
 
 
 
101
 
102
  prompt, generated_text = get_llm_response(rendered_template)
103
  print("Albert : ", generated_text)