jonathanjordan21 committed
Commit 20d5ffa
1 Parent(s): 5bac23f

Update app.py

Files changed (1)
  1. app.py +29 -11
app.py CHANGED
@@ -11,7 +11,7 @@ from typing import Optional
 
 from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
 from langchain_core.chat_history import BaseChatMessageHistory
-from langchain.memory import ConversationBufferMemory#, PostgresChatMessageHistory
+from langchain.memory import ConversationBufferMemory, PostgresChatMessageHistory
 
 
 
@@ -19,20 +19,38 @@ from langchain.memory import ConversationBufferMemory#, PostgresChatMessageHisto
 API_TOKEN = os.getenv('HF_INFER_API')
 POSTGRE_URL = os.environ['POSTGRE_URL']
 
+@st.cache_resource
+def get_llm_chain():
+    return custom_chain_with_history(
+        llm=CustomLLM(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1", model_type='text-generation', api_token=API_TOKEN, stop=["\n<|","<|"], temperature=0.001),
+        # memory=st.session_state.memory.chat_memory,
+        memory=st.session_state.memory
+    )
+
+
+@st.cache_resource
+def get_memory():
+    return PostgresChatMessageHistory(connection_string=POSTGRE_URL, session_id=str(datetime.timestamp(datetime.now())))
 
 
 if 'memory' not in st.session_state:
-    st.session_state['memory'] = ConversationBufferMemory(return_messages=True)
+    # st.session_state['memory'] = ConversationBufferMemory(return_messages=True)
 
     # st.session_state.memory = PostgresChatMessageHistory(connection_string=POSTGRE_URL, session_id=str(datetime.timestamp(datetime.now())))
+
+    st.session_state.memory = get_memory()
     st.session_state.memory.chat_memory.add_ai_message("Hello, My name is Jonathan Jordan. You can call me Jojo. How can I help you today?")
 
 if 'chain' not in st.session_state:
-    st.session_state['chain'] = custom_chain_with_history(
-        llm=CustomLLM(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1", model_type='text-generation', api_token=API_TOKEN, stop=["\n<|","<|"], temperature=0.001),
-        memory=st.session_state.memory.chat_memory,
-        # memory=st.session_state.memory
-    )
+    # st.session_state['chain'] = custom_chain_with_history(
+    #     llm=CustomLLM(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1", model_type='text-generation', api_token=API_TOKEN, stop=["\n<|","<|"], temperature=0.001),
+    #     memory=st.session_state.memory.chat_memory,
+    #     # memory=st.session_state.memory
+    # )
+
+    st.session_state['chain'] = get_llm_chain()
+
+
 
 st.title("Chat With Me")
 st.subheader("by Jonathan Jordan")
@@ -58,10 +76,10 @@ if prompt := st.chat_input("Ask me anything.."):
     # Display assistant response in chat message container
     with st.chat_message("assistant"):
         st.markdown(response)
-    # st.session_state.memory.add_user_message(prompt)
-    # st.session_state.memory.add_ai_message(response)
-    st.session_state.memory.save_context({"question":prompt}, {"output":response})
-    st.session_state.memory.chat_memory.messages = st.session_state.memory.chat_memory.messages[-15:]
+    st.session_state.memory.add_user_message(prompt)
+    st.session_state.memory.add_ai_message(response)
+    # st.session_state.memory.save_context({"question":prompt}, {"output":response})
+    # st.session_state.memory.chat_memory.messages = st.session_state.memory.chat_memory.messages[-15:]
     # Add assistant response to chat history
     st.session_state.messages.append({"role": "assistant", "content": response})
 
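
For reference, a minimal, self-contained sketch of the pattern this commit moves to: a Postgres-backed chat history cached once per process with @st.cache_resource, with each turn persisted via add_user_message / add_ai_message. The greeting text and the stubbed response below are placeholders, and the real app builds its response through custom_chain_with_history and CustomLLM, which are not reproduced here; the import path follows the one used in the commit itself.

# Minimal sketch of the commit's caching + persistence pattern (placeholders noted above).
import os
from datetime import datetime

import streamlit as st
from langchain.memory import PostgresChatMessageHistory  # same import path as the commit

POSTGRE_URL = os.environ['POSTGRE_URL']  # assumed: a standard postgresql:// connection string

@st.cache_resource
def get_memory():
    # Built once per Streamlit process and reused across reruns;
    # session_id pins the conversation to one set of rows in Postgres.
    return PostgresChatMessageHistory(
        connection_string=POSTGRE_URL,
        session_id=str(datetime.timestamp(datetime.now())),
    )

if 'memory' not in st.session_state:
    st.session_state.memory = get_memory()
    # PostgresChatMessageHistory is a BaseChatMessageHistory, so add_ai_message
    # is called on it directly (it has no .chat_memory attribute).
    st.session_state.memory.add_ai_message("Hello, how can I help you today?")

if prompt := st.chat_input("Ask me anything.."):
    response = f"You said: {prompt}"  # placeholder for the real chain invocation
    with st.chat_message("assistant"):
        st.markdown(response)
    # Persist both sides of the turn to Postgres.
    st.session_state.memory.add_user_message(prompt)
    st.session_state.memory.add_ai_message(response)

One caveat of this pattern: st.cache_resource is shared across all user sessions of the Streamlit process, so a history cached this way (with its single session_id) is shared by every visitor; per-user history would require creating the object per session instead.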