Chris4K committed on
Commit 53a3a44
1 Parent(s): da3ee0e
Files changed (1)
  1. model/custom_agent.py +3 -1
model/custom_agent.py CHANGED
@@ -24,7 +24,7 @@ import requests
 from transformers import Agent
 from utils.logger import log_response
 
-from transformers import AutoModelForCausalLM, AutoTokenizer
+from transformers import AutoTokenizer
 
 
 class CustomHfAgent(Agent):
@@ -108,6 +108,7 @@ class CustomHfAgent(Agent):
 
 
         description = "\n".join([f"- {name}: {tool.description}" for name, tool in self.toolbox.items()])
+
         if chat_mode:
             if self.chat_history is None:
                 prompt = self.chat_prompt_template.replace("<<all_tools>>", description)
@@ -120,6 +121,7 @@ class CustomHfAgent(Agent):
                 prompt = tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
            else:
                 prompt = self.chat_history
+            cmp = ""
             cmp = CHAT_MESSAGE_PROMPT.replace("<<task>>", task)
             messages = [
                 {
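For context, the prompt-building path this commit touches only needs the tokenizer, not the model weights: it renders the chat messages into a plain prompt string with apply_chat_template and that string is what gets sent for generation. The sketch below shows that usage in isolation; the checkpoint id, the system message, and the trimmed-down messages list are placeholder assumptions, not values taken from CustomHfAgent.

# Minimal sketch of the tokenizer-only prompt rendering assumed by this commit.
# The checkpoint id and message contents are illustrative placeholders.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/zephyr-7b-beta")  # assumed model id

# Stand-in for the toolbox description the agent builds from self.toolbox.
description = "- image_generator: generates an image from a prompt"
messages = [
    {"role": "system", "content": f"You can use these tools:\n{description}"},
    {"role": "user", "content": "Draw a cat."},
]

# Render the chat template to a string (tokenize=False), ready to send to an endpoint.
prompt = tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
print(prompt)

Since only the rendered prompt string is needed at this point, narrowing the import to AutoTokenizer is consistent with never loading the causal LM weights inside this module.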