fmmkii committed
Commit 297c5bc · Parent(s): 0310da6

Updated app.py

Files changed (2):
  1. .gitignore +1 -0
  2. app.py +74 -59
.gitignore ADDED
@@ -0,0 +1 @@
+ /.streamlit
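Ignoring /.streamlit keeps Streamlit's local secrets file out of the repository; that directory's secrets.toml is what the reworked app.py reads through st.secrets.get('REPLICATE_API_TOKEN', ''). A minimal sketch of that lookup (illustrative, not part of this commit; the fallback warning text is an assumption):

import streamlit as st

# Streamlit loads .streamlit/secrets.toml (now untracked) into st.secrets.
# A matching entry would look like:  REPLICATE_API_TOKEN = "r8_..."  (token value elided)
token = st.secrets.get("REPLICATE_API_TOKEN", "")  # empty string when no secret is configured
if not token:
    st.warning("No Replicate token in secrets; fall back to asking the user for one.")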
app.py CHANGED
@@ -2,45 +2,69 @@ import streamlit as st
  from langchain_community.llms import Replicate
  from langchain.chains import ConversationChain
  from langchain.memory import ConversationSummaryMemory
- from langchain.prompts import ChatPromptTemplate
  import os

  # Setting page title and header
  st.set_page_config(page_title="Chat GPT Clone", page_icon="🤖")
  st.markdown("<h1 style='text-align: center;'>How can I assist you? </h1>", unsafe_allow_html=True)

- replicate_api = st.secrets['REPLICATE_API_TOKEN']
+ # Initialize session state if not already done
+ if "messages" not in st.session_state:
+     st.session_state.messages = [{"role": "assistant", "content": "Hey there! Feel free to ask me anything. What can I do for you?"}]
+ if "conversation" not in st.session_state:
+     st.session_state["conversation"] = None
+ if "REPLICATE_API_TOKEN" not in st.session_state:
+     st.session_state["REPLICATE_API_TOKEN"] = ""
+
+ def clear_chat_history():
+     # Clear the chat history
+     st.session_state.messages = [{"role": "assistant", "content": "Okay, let's start over, what are we talking about now?"}]
+
+     # Reset the conversation memory (clear previous memory)
+     if 'conversation' in st.session_state:
+         # Reinitialize the memory to reset the conversation memory state
+         llm = Replicate(
+             model="meta/meta-llama-3-8b-instruct",
+             model_kwargs={
+                 "temperature": 0.01,
+                 "top_p": 0.9,
+                 "max_length": 128,
+             },
+         )
+         # Recreate the conversation chain with a fresh memory
+         st.session_state['conversation'] = ConversationChain(
+             llm=llm,
+             verbose=True,
+             memory=ConversationSummaryMemory(llm=llm),
+         )
+
  # Sidebar for API key input and summarisation
  with st.sidebar:
-     # Check if API key is provided via st.secrets
-     if 'REPLICATE_API_TOKEN' in st.secrets:
-         if not (replicate_api.startswith('r8_') and len(replicate_api)==40):
-             replicate_api = st.text_input('Enter your Replicate API token:', type= 'password')
-             st.warning('Invalid or no API key! Please try again')
-         else:
-             st.success('Try asking now!')
+     # Get Replicate API Token
+     replicate_api = st.secrets.get('REPLICATE_API_TOKEN', '')
+     if not replicate_api or not (replicate_api.startswith('r8_') and len(replicate_api) == 40):
+         replicate_api = st.text_input('Enter your Replicate API token:', type='password')
+         if not replicate_api:
+             st.warning('API token is required for this app to work!')
      else:
-         replicate_api = st.text_input('Enter your Replicate API token:', type= 'password')
-
+         st.success('Replicate API token is valid!')
+
+     # Set environment variable for Replicate
      os.environ['REPLICATE_API_TOKEN'] = replicate_api
-
-     # Button to summarise the conversation
-     summarise_button = st.button("Summarise the conversation", key="summarise_button")
+     st.session_state['REPLICATE_API_TOKEN'] = replicate_api

-     # Check if the summarise button was clicked
+     # Summarise button logic
+     summarise_button = st.button("Summarise the conversation", key="summarise_button")
      if summarise_button:
          if st.session_state.get('conversation') and st.session_state['conversation'].memory:
-             # Generate the summary
              summary = st.session_state['conversation'].memory.buffer
-             st.session_state['summary'] = summary  # Store in session state
+             st.session_state['summary'] = summary
              st.write("Summary:\n\n" + summary)
          else:
-             # Handle case where no conversation memory exists
-             summary = "No conversation to summarise leh!"
-             st.write(summary)
+             st.write("No conversation to summarise.")

-     # Add a download button if a summary exists
-     if st.session_state.get('summary'):
+     # Add download button for summary
+     if 'summary' in st.session_state:
          st.download_button(
              label="Download Summary",
              data=st.session_state['summary'],
@@ -48,61 +72,52 @@ with st.sidebar:
              mime="text/plain",
          )

- # Initialize conversation state if not already done
- if 'conversation' not in st.session_state:
-     st.session_state['conversation'] = None
- if 'messages' not in st.session_state:
-     st.session_state['messages'] = []
- if 'REPLICATE_API_TOKEN' not in st.session_state:
-     st.session_state['REPLICATE_API_TOKEN'] = ''
+     clear_chat = st.button('Clear Chat History', on_click=clear_chat_history)

- # Function to get response from OpenAI model
- def get_response(user_input, replicate_api):
-     if st.session_state['conversation'] is None:
-         llm = Replicate(
-             model=("meta/meta-llama-3-8b-instruct"),
-             model_kwargs={
-                 "temperature": 0.01,
-                 "top_p": 0.9,
-                 "max_length": 128,
-             },
-         )
-         st.session_state['conversation'] = ConversationChain(
-             llm=llm,
-             verbose=True,
-             memory=ConversationSummaryMemory(llm=llm),
-         )
-     # Call the conversation chain
+ # Initialize conversation state if not done already
+ if 'conversation' not in st.session_state or st.session_state['conversation'] is None:
+     llm = Replicate(
+         model="meta/meta-llama-3-8b-instruct",
+         model_kwargs={
+             "temperature": 0.01,
+             "top_p": 0.9,
+             "max_length": 128,
+         },
+     )
+     st.session_state['conversation'] = ConversationChain(
+         llm=llm,
+         verbose=True,
+         memory=ConversationSummaryMemory(llm=llm),
+     )
+
+ # Function to get the response from the LLM
+ def get_response(user_input):
      response_dict = st.session_state['conversation'].invoke(input=user_input)
-
-     # Extract the response field only
-     return response_dict.get("response", "No response generated leh!")
+     return response_dict.get("response", "No response generated!")

  # Function to get user input
  def get_text():
-     chat_input = st.chat_input("Say Hello")
-     return chat_input
+     return st.chat_input("Say something!")

- # Display chat history
+ # Display the chat history (including the initial message)
  if st.session_state.messages:
      for msg in st.session_state.messages:
          with st.chat_message(msg["role"]):
              st.markdown(msg["content"])

- # Get user input
+ # Handle user input and conversation flow
  user_input = get_text()
-
  if user_input:
      try:
-         # Get the response from the LLM
-         response = get_response(user_input, st.session_state['REPLICATE_API_TOKEN'])
-
-         # Append the user message to session state
+         # Append user input to messages
          st.session_state.messages.append({"role": "user", "content": user_input})
          with st.chat_message("user"):
              st.markdown(user_input)

-         # Append and display the LLM response
+         # Get response from LLM
+         response = get_response(user_input)
+
+         # Append LLM response to messages
          st.session_state.messages.append({"role": "LLM", "content": response})
          with st.chat_message("LLM"):
              st.markdown(response)
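The summarise button relies on ConversationSummaryMemory keeping a running, LLM-written summary in memory.buffer; app.py stores that buffer in st.session_state['summary'] for download, and clear_chat_history() rebuilds the chain so the buffer starts empty again. Below is a minimal, self-contained sketch of that mechanism (not from this repository): FakeListLLM from langchain_community stands in for the Replicate model so it runs without an API token, and the canned strings are purely illustrative assumptions.

from langchain.chains import ConversationChain
from langchain.memory import ConversationSummaryMemory
from langchain_community.llms.fake import FakeListLLM

# The fake LLM answers calls in order: first the chat reply, then the summary
# that ConversationSummaryMemory asks the model to write when the turn is saved.
llm = FakeListLLM(responses=[
    "Hello! How can I help you today?",              # reply returned to the user
    "The human greeted the AI and asked for help.",  # running summary stored in memory.buffer
])

conversation = ConversationChain(
    llm=llm,
    memory=ConversationSummaryMemory(llm=llm),
)

result = conversation.invoke(input="Hi there!")    # same invoke(input=...) call app.py makes
print(result["response"])                          # what get_response() returns in app.py
print(conversation.memory.buffer)                  # what the "Summarise the conversation" button displays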