anpigon committed
Commit cf07068 · 1 Parent(s): 25e6293

refactor: Update llm.py and prompt.py files

Files changed (2):
  1. libs/llm.py +8 -3
  2. libs/prompt.py +1 -1
libs/llm.py CHANGED
@@ -10,7 +10,7 @@ from langchain.callbacks.base import BaseCallbackHandler
 
 class StreamCallback(BaseCallbackHandler):
     def on_llm_new_token(self, token: str, **kwargs):
-        print(token, end="", flush=True)
+        # print(token, end="", flush=True)
 
 
 def get_llm(streaming=True):
@@ -46,8 +46,13 @@ def get_llm(streaming=True):
             streaming=streaming,
             callbacks=[StreamCallback()],
         ),
-        ollama=ChatOllama(
-            model="EEVE-Korean-10.8B:long",
+        eeve=ChatOllama(
+            model="EEVE-Korean-10.8B",
+            streaming=streaming,
+            callbacks=[StreamCallback()],
+        ),
+        gemma2=ChatOllama(
+            model="gemma2",
             streaming=streaming,
             callbacks=[StreamCallback()],
         ),
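The hunk above only shows the keyword entries being swapped (ollama replaced by eeve and gemma2), so the surrounding call inside get_llm is not visible. Below is a minimal sketch of how the two Ollama models added in this commit could be constructed and exercised on their own; the helper name get_ollama_models, the dict wrapper, the pass statement, and the test prompt are assumptions added for illustration, while the ChatOllama arguments come directly from the diff.

    # Sketch only, not the repository's get_llm. Assumptions are marked inline.
    from langchain.callbacks.base import BaseCallbackHandler
    from langchain_community.chat_models import ChatOllama


    class StreamCallback(BaseCallbackHandler):
        def on_llm_new_token(self, token: str, **kwargs):
            # After this commit the token is no longer echoed to stdout.
            # print(token, end="", flush=True)
            pass  # assumed: keeps the method body valid once the print is commented out


    def get_ollama_models(streaming=True):
        # Mirrors the two entries added in the second hunk above.
        return {
            "eeve": ChatOllama(
                model="EEVE-Korean-10.8B",
                streaming=streaming,
                callbacks=[StreamCallback()],
            ),
            "gemma2": ChatOllama(
                model="gemma2",
                streaming=streaming,
                callbacks=[StreamCallback()],
            ),
        }


    if __name__ == "__main__":
        llm = get_ollama_models()["gemma2"]
        print(llm.invoke("안녕하세요!").content)  # hypothetical prompt, requires a local Ollama server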
libs/prompt.py CHANGED
@@ -28,4 +28,4 @@ def get_prompt(chat_history):
             MessagesPlaceholder(variable_name="history"),
             ("human", "{question}"),
         ]
-    ).partial(history=chat_history.messages)
+    ).partial(history=chat_history)
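The only change here is that .partial() now binds chat_history itself rather than chat_history.messages, presumably because the caller now passes the message list directly. A sketch of what get_prompt might look like after the change follows, assuming a standard ChatPromptTemplate; the system message text is a placeholder and not taken from the repository.

    from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder


    def get_prompt(chat_history):
        # Builds a chat prompt whose "history" slot is pre-filled with the
        # conversation so far; the final .partial(...) line is the part this
        # commit touches (chat_history instead of chat_history.messages).
        return ChatPromptTemplate.from_messages(
            [
                ("system", "You are a helpful assistant."),  # assumed system prompt
                MessagesPlaceholder(variable_name="history"),
                ("human", "{question}"),
            ]
        ).partial(history=chat_history)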