DSXiangLi committed on
Commit faf58a4
1 Parent(s): 29be613
Files changed (3):
  1. ape/llm.py +3 -8
  2. ape/prompt.py +2 -5
  3. app.py +1 -1
ape/llm.py CHANGED
@@ -24,7 +24,7 @@ class LLMGPT(object):
     def __init__(self, openai_key):
         self.gen_llm = ChatOpenAI(openai_api_key=openai_key, max_tokens=2000, temperature=0.7, verbose=True)
         self.eval_llm = OpenAI(openai_api_key=openai_key, max_tokens=2000, temperature=0.7, logprobs=1, verbose=True)
-        self.test_llm = ChatOpenAI(openai_api_key=openai_key, max_tokens=2000, temperature=0.7, verbose=True)
+        self.test_llm = OpenAI(openai_api_key=openai_key, max_tokens=2000, temperature=0.7, verbose=True)
         self.gen_chain = None
         self.eval_chain = None
 
@@ -61,13 +61,8 @@ class LLMGPT(object):
 
     def generate_output(self, test_prompt, instruction, input):
         if not test_prompt:
-            test_prompt = MyTemplate['test_user_prompt']
-        prompt = ChatPromptTemplate.from_messages(
-            [
-                SystemMessagePromptTemplate.from_template(MyTemplate['test_sys_prompt']),
-                HumanMessagePromptTemplate.from_template(test_prompt),
-            ]
-        )
+            test_prompt = MyTemplate['test_prompt']
+        prompt = PromptTemplate.from_template(test_prompt)
         test_chain = LLMChain(llm=self.test_llm, prompt=prompt)
         output = test_chain({'input': input, 'instruction': instruction})
         return output
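The change above swaps the single-test path from a chat model with system/user message templates to a plain completion model with one template. Below is a minimal, self-contained sketch of that new pattern, assuming the classic LangChain package layout (langchain.llms.OpenAI, langchain.prompts.PromptTemplate, langchain.chains.LLMChain); the API key and the filled-in instruction/input values are placeholders, not part of the commit.

# Sketch of the simplified single-test path: a completion LLM plus a plain
# PromptTemplate replaces the ChatOpenAI + ChatPromptTemplate pair.
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

# Same template string as MyTemplate['test_prompt'] in ape/prompt.py.
test_prompt = "任务描述:{instruction}\n输入:{input}\n输出:"
prompt = PromptTemplate.from_template(test_prompt)  # infers {instruction} and {input} as input variables

test_llm = OpenAI(openai_api_key="YOUR_OPENAI_KEY", max_tokens=2000, temperature=0.7)
test_chain = LLMChain(llm=test_llm, prompt=prompt)

# Calling the chain with a dict fills both variables; the completion is returned under 'text'.
output = test_chain({'instruction': 'Classify the sentiment of the input',
                     'input': 'I really enjoyed this movie.'})
print(output['text'])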
ape/prompt.py CHANGED
@@ -7,15 +7,12 @@ gen_sys_prompt = ""
 
 eval_prompt = "任务描述:{instruction}\n输入:{input}\n输出:{output}"
 
-test_sys_prompt = ""
-
-test_user_prompt = "任务描述:{instruction}\n输入:{input}\n输出:"
+test_prompt = "任务描述:{instruction}\n输入:{input}\n输出:"
 
 MyTemplate = {
     'gen_user_prompt': gen_user_prompt,
     'gen_sys_prompt': gen_sys_prompt,
     'eval_prompt': eval_prompt,
     'few_shot_prompt': few_shot_prompt,
-    'test_sys_prompt': test_sys_prompt,
-    'test_user_prompt': test_user_prompt
+    'test_prompt': test_prompt
 }
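The empty test_sys_prompt and the separate test_user_prompt are collapsed into a single test_prompt key. A small sketch of how that merged template is meant to be used: only {instruction} and {input} are substituted, and the trailing "输出:" ("Output:") is left open for the completion model to continue. The values filled in here are illustrative placeholders.

# Only the two variables are substituted; the template ends with "输出:" so the
# completion model writes the answer directly after it.
test_prompt = "任务描述:{instruction}\n输入:{input}\n输出:"

filled = test_prompt.format(
    instruction="Summarize the input in one sentence",
    input="Automatic Prompt Engineer generates and scores candidate instructions.",
)
print(filled)
# 任务描述:Summarize the input in one sentence
# 输入:Automatic Prompt Engineer generates and scores candidate instructions.
# 输出: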
app.py CHANGED
@@ -49,7 +49,7 @@ with gr.Blocks(title="Automatic Prompt Engineer", theme=gr.themes.Glass()) as de
                                  placeholder=MyTemplate['eval_prompt'],
                                  value='', label="Prompt for Evaluation")
         test_prompt = gr.Textbox(max_lines=100, lines=3, interative=True,
-                                 placeholder=MyTemplate['test_user_prompt'],
+                                 placeholder=MyTemplate['test_prompt'],
                                  value='', label="Prompt for Single Test")
 
         with gr.Row():
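For context, a minimal sketch of how the renamed key surfaces in the UI: the template string is only a placeholder hint in the textbox, so leaving the box empty still falls back to MyTemplate['test_prompt'] inside generate_output. This assumes gradio is installed and the ape.prompt module layout from this repo; the upstream keyword is spelled "interative", written here with the standard Gradio spelling "interactive".

import gradio as gr
from ape.prompt import MyTemplate

with gr.Blocks(title="Automatic Prompt Engineer") as demo:
    # The placeholder shows the default single-test template; value stays empty
    # so the backend can decide whether to substitute the default.
    test_prompt = gr.Textbox(max_lines=100, lines=3, interactive=True,
                             placeholder=MyTemplate['test_prompt'],
                             value='', label="Prompt for Single Test")

demo.launch()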