Ashishkr committed
Commit 5457d9a
1 Parent(s): bde65d6

Update model.py

Files changed (1):
  1. model.py +23 -6
model.py CHANGED
@@ -57,18 +57,35 @@ tokenizer = transformers.AutoTokenizer.from_pretrained(
 
 
 
+# def get_prompt(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> str:
+#     texts = [f'{system_prompt}\n']
+
+#     for user_input, response in chat_history[:-1]:
+#         texts.append(f'{user_input} {response}\n')
+
+#     # Getting the user input and response from the last tuple in the chat history
+#     last_user_input, last_response = chat_history[-1]
+#     texts.append(f' input: {last_user_input} {last_response} {message} response: ')
+
+#     return ''.join(texts)
+
 def get_prompt(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> str:
     texts = [f'{system_prompt}\n']
 
-    for user_input, response in chat_history[:-1]:
-        texts.append(f'{user_input} {response}\n')
+    # If chat_history is not empty, process all but the last entry
+    if chat_history:
+        for user_input, response in chat_history[:-1]:
+            texts.append(f'{user_input} {response}\n')
 
-    # Getting the user input and response from the last tuple in the chat history
-    last_user_input, last_response = chat_history[-1]
-    texts.append(f' input: {last_user_input} {last_response} {message} response: ')
+        # Getting the user input and response from the last tuple in the chat history
+        last_user_input, last_response = chat_history[-1]
+        texts.append(f' input: {last_user_input} {last_response} {message} Response: ')
+    else:
+        # If chat_history is empty, just add the message with 'Response:' at the end
+        texts.append(f' input: {message} Response: ')
 
     return ''.join(texts)
-
+
 
 def get_input_token_length(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> int:
     prompt = get_prompt(message, chat_history, system_prompt)
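For reference, a minimal usage sketch of the updated get_prompt. The import path, system prompt, and chat turns below are illustrative assumptions, not part of the commit; the expected outputs follow directly from the code in the diff above.

from model import get_prompt  # assumes this repo's model.py is importable as `model`

system_prompt = "You are a helpful assistant."  # hypothetical system prompt

# An empty chat history now takes the new else-branch instead of raising
# IndexError on chat_history[-1] as the previous version would:
print(get_prompt("Hello", [], system_prompt))
# -> "You are a helpful assistant.\n input: Hello Response: "

# With prior turns, all but the last pair are joined as-is, and the last pair is
# folded into the " input: ... Response: " template together with the new message:
history = [("Hi", "Hello there."), ("How are you?", "I'm fine.")]
print(get_prompt("Tell me a joke", history, system_prompt))
# -> "You are a helpful assistant.\nHi Hello there.\n input: How are you? I'm fine. Tell me a joke Response: "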