azenabeel committed
Commit fdc7a58
1 Parent(s): bdfde83

Upload 4 files

Files changed (4)
  1. Pipfile +18 -0
  2. Pipfile.lock +0 -0
  3. app.py +72 -0
  4. requirements.txt +7 -0
Pipfile ADDED
@@ -0,0 +1,18 @@
+ [[source]]
+ url = "https://pypi.org/simple"
+ verify_ssl = true
+ name = "pypi"
+
+ [packages]
+ streamlit = "*"
+ langchain = "*"
+ openai = "*"
+ tiktoken = "*"
+ python-dotenv = "*"
+ langchain-openai = "*"
+ streamlit-chat = "*"
+
+ [dev-packages]
+
+ [requires]
+ python_version = "3.11"
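
For local development, this Pipfile (together with the Pipfile.lock added below) is what pipenv reads: running pipenv install and then pipenv run streamlit run app.py would typically recreate the Python 3.11 environment and launch the app. These are ordinary pipenv and Streamlit CLI commands, not something the commit itself specifies.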
Pipfile.lock ADDED
The diff for this file is too large to render. See raw diff
 
app.py ADDED
@@ -0,0 +1,72 @@
+ import streamlit as st
+ from streamlit_chat import message
+ from langchain_openai import OpenAI
+ from langchain.chains import ConversationChain
+ from langchain.chains.conversation.memory import ConversationSummaryMemory
+
+ # Initialise session state on the first run.
+ if 'conversation' not in st.session_state:
+     st.session_state['conversation'] = None
+ if 'messages' not in st.session_state:
+     st.session_state['messages'] = []
+ if 'API_Key' not in st.session_state:
+     st.session_state['API_Key'] = ''
+
+ st.set_page_config(page_title="Chat GPT Clone", page_icon=":robot_face:")
+ st.markdown("<h1 style='text-align: center;'>How can I assist you?</h1>", unsafe_allow_html=True)
+
+ # Sidebar: API key input plus a button that prints the running conversation summary.
+ st.sidebar.title("Summary")
+ st.session_state['API_Key'] = st.sidebar.text_input("Please enter your API key before chatting!", type="password")
+ summarise_button = st.sidebar.button("Summarise the conversation", key="summarise")
+ if summarise_button:
+     if st.session_state['conversation'] is not None:
+         st.sidebar.write("Nice chatting with you my friend:\n\n" + st.session_state['conversation'].memory.buffer)
+     else:
+         st.sidebar.write("Nothing to summarise yet - start chatting first!")
+
+
+ def getresponse(userInput, api_key):
+     # Build the conversation chain lazily, on the first message.
+     if st.session_state['conversation'] is None:
+         llm = OpenAI(
+             temperature=0,
+             openai_api_key=api_key,
+             model_name='gpt-3.5-turbo-instruct'
+         )
+         st.session_state['conversation'] = ConversationChain(
+             llm=llm,
+             verbose=True,
+             memory=ConversationSummaryMemory(llm=llm)
+         )
+
+     response = st.session_state['conversation'].predict(input=userInput)
+     print(st.session_state['conversation'].memory.buffer)  # log the running summary to the server console
+     return response
+
+
+ response_container = st.container()
+ container = st.container()
+
+ with container:
+     with st.form(key='my_form', clear_on_submit=True):
+         user_input = st.text_area("Your question goes here:", key='input', height=100)
+         submit_button = st.form_submit_button(label='Send')
+
+         if submit_button:
+             st.session_state['messages'].append(user_input)
+             model_response = getresponse(user_input, st.session_state['API_Key'])
+             st.session_state['messages'].append(model_response)
+
+ # Render the chat history: even indices are user turns, odd indices are model replies.
+ with response_container:
+     for i in range(len(st.session_state['messages'])):
+         if (i % 2) == 0:
+             message(st.session_state['messages'][i], is_user=True, key=str(i) + '_user')
+         else:
+             message(st.session_state['messages'][i], key=str(i) + '_AI')
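
The core of app.py is the pairing of ConversationChain with ConversationSummaryMemory: each predict() call sends the user's message plus an LLM-written running summary of earlier turns, and that same summary (memory.buffer) is what the sidebar's Summarise button displays. Below is a minimal sketch of that pattern outside Streamlit; it assumes an OPENAI_API_KEY is available in the environment (python-dotenv can load one from a .env file), and the example prompts are illustrative only, not part of the commit.

# Minimal sketch of the chain-plus-summary-memory pattern from app.py, run outside Streamlit.
# Assumes OPENAI_API_KEY is set; load_dotenv() will pick it up from a .env file if present.
from dotenv import load_dotenv
from langchain_openai import OpenAI
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationSummaryMemory

load_dotenv()

llm = OpenAI(temperature=0, model_name='gpt-3.5-turbo-instruct')
conversation = ConversationChain(
    llm=llm,
    # keeps an LLM-written running summary instead of the full transcript
    memory=ConversationSummaryMemory(llm=llm),
)

print(conversation.predict(input="Hi, I'm planning a weekend trip to Lisbon."))  # illustrative prompt
print(conversation.predict(input="What did I say I was planning?"))  # answered from the summary memory
print(conversation.memory.buffer)  # the rolling summary app.py shows in the sidebar

Because only the summary is re-sent on each turn, the prompt stays short even for long conversations, which is why the sidebar button can simply print memory.buffer.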
requirements.txt ADDED
@@ -0,0 +1,7 @@
+ streamlit
+ streamlit-chat
+ langchain
+ openai
+ tiktoken
+ python-dotenv
+ langchain-openai
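
requirements.txt lists the same dependencies, unpinned, for plain pip environments; hosts that install directly from requirements.txt (Hugging Face Spaces, for example) typically pick this file up automatically, so the app can run without pipenv.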