Chris4K committed
Commit c7588c6
1 Parent(s): 871babb
controller.py CHANGED

@@ -86,7 +86,7 @@ class Controller:
         Returns:
         - str: The response from the conversation chain.
         """
-        agent_chat_bot = ConversationChainSingleton().get_conversation_chain()
+        agent_chat_bot = ConversationChainSingleton().conversation_chain("tmp")
         print(agent_chat_bot)
         print("------------ msg -----------------------")
         print(user_message + " ---- " )
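For context, the call site now goes through the new instance method instead of the removed `get_conversation_chain()`. Below is a minimal sketch of how such a controller method might read after this commit; the method name `respond`, the import path, and the closing `predict` call are illustrative assumptions, not part of the diff:

```python
# Hypothetical controller sketch; only the ConversationChainSingleton()
# .conversation_chain("tmp") call and the print statements mirror the diff.
from model.conversation_chain_singleton import ConversationChainSingleton


class Controller:
    def respond(self, user_message: str) -> str:
        """Return the response from the conversation chain."""
        # Build the chain through the singleton's new instance method.
        agent_chat_bot = ConversationChainSingleton().conversation_chain("tmp")
        print(agent_chat_bot)
        print("------------ msg -----------------------")
        print(user_message + " ---- ")
        # ConversationChain accepts the user turn under the "input" key.
        return agent_chat_bot.predict(input=user_message)
```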
model/conversation_chain_singleton.py CHANGED

@@ -17,6 +17,26 @@ from langchain.chains import ConversationChain
 from langchain.llms import HuggingFaceHub
 
 class ConversationChainSingleton:
+    def __init__(self) -> None:
+        pass
+    def conversation_chain(self, text):
+        """
+        Create a conversational retrieval chain and a language model.
+
+        Returns:
+        - ConversationChain: The initialized conversation chain.
+        """
+        print(text)
+        llm = HuggingFaceHub(
+            repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
+            model_kwargs={"max_length": 1048, "temperature": 0.2, "max_new_tokens": 256, "top_p": 0.95, "repetition_penalty": 1.0},
+        )
+        memory = ConversationBufferMemory(memory_key="history", return_messages=True)
+        conversation_chain = ConversationChain(
+            llm=llm, verbose=True, memory=memory
+        )
+        return conversation_chain
+
     """
     A singleton class for managing a conversation chain instance.
 
@@ -30,7 +50,7 @@ class ConversationChainSingleton:
 
 
     - get_conversation_chain(): Creates and returns a conversational retrieval chain and a language model.
-    """
+
 
     _instance = None
 
@@ -38,24 +58,7 @@ class ConversationChainSingleton:
         if not cls._instance:
             cls._instance = super(ConversationChainSingleton, cls).__new__(cls)
             # Initialize your conversation chain here
-            cls._instance.conversation_chain = cls.get_conversation_chain()
+            cls._instance.conversation_chain = cls.get_conversation_chain(cls._instance)
         return cls._instance
-
+    """
 
-    def get_conversation_chain():
-        """
-        Create a conversational retrieval chain and a language model.
-
-        Returns:
-        - ConversationChain: The initialized conversation chain.
-        """
-        llm = HuggingFaceHub(
-            repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
-            model_kwargs={"max_length": 1048, "temperature": 0.2, "max_new_tokens": 256, "top_p": 0.95, "repetition_penalty": 1.0},
-        )
-        memory = ConversationBufferMemory(memory_key="history", return_messages=True)
-        conversation_chain = ConversationChain(
-            llm=llm, verbose=True, memory=memory
-        )
-        return conversation_chain
-
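Taken on its own, the `conversation_chain` method added above builds a LangChain `ConversationChain` on top of the Mixtral endpoint on the Hugging Face Hub, with a plain buffer memory. Here is a self-contained sketch of that construction; the `langchain.memory` import location for `ConversationBufferMemory` and the `HUGGINGFACEHUB_API_TOKEN` environment variable are assumptions, since the original import lines and credentials fall outside the hunks shown:

```python
# Standalone sketch of the chain construction shown in the diff above.
# Assumes HUGGINGFACEHUB_API_TOKEN is exported; the ConversationBufferMemory
# import path is an assumption, as it is not visible in the hunks.
from langchain.chains import ConversationChain
from langchain.llms import HuggingFaceHub
from langchain.memory import ConversationBufferMemory


def build_conversation_chain() -> ConversationChain:
    """Create a conversation chain over Mixtral with buffer memory."""
    llm = HuggingFaceHub(
        repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
        model_kwargs={
            "max_length": 1048,
            "temperature": 0.2,
            "max_new_tokens": 256,
            "top_p": 0.95,
            "repetition_penalty": 1.0,
        },
    )
    memory = ConversationBufferMemory(memory_key="history", return_messages=True)
    return ConversationChain(llm=llm, verbose=True, memory=memory)


if __name__ == "__main__":
    chain = build_conversation_chain()
    # Each predict() call appends the turn to the buffer memory.
    print(chain.predict(input="Hello!"))
```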