suriya7 committed on
Commit
3020581
1 Parent(s): 519b9f1

Update app.py

Files changed (1)
  1. app.py +33 -36
app.py CHANGED
@@ -20,7 +20,7 @@ llm = ChatGoogleGenerativeAI(model="gemini-pro",
 
 
 
-template = """You are a friendly chatbot called "CRETA" who give clear an well having a conversation with a human and you are created by suriya an AI Enthusiastic.
+template = """You are a friendly chatbot called "Chatto" who give clear an well having a conversation with a human and you are created by suriya an AI Enthusiastic.If user query anything about link try to use "provied_url_extracted_text content".
 provied_url_extracted_text:
 {extracted_text}
 provided document:
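
For orientation, the context lines around this hunk (`ChatGoogleGenerativeAI(model="gemini-pro",` and `llm_chain = LLMChain(`) suggest the template is wired up roughly as in the sketch below. This is a minimal, self-contained sketch rather than app.py itself: only `extracted_text` and `chat_history` are visible in the diff, so the shortened template and the other variable names are illustrative stand-ins.

```python
# Minimal sketch (not app.py itself) of plugging a prompt template into an
# LLMChain backed by Gemini. Needs GOOGLE_API_KEY in the environment; the
# provided_docs / human_input names are assumptions for illustration.
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI

template = """You are a friendly chatbot called "Chatto".
provied_url_extracted_text:
{extracted_text}
provided document:
{provided_docs}
chat history:
{chat_history}
Human: {human_input}
Chatbot:"""

llm = ChatGoogleGenerativeAI(model="gemini-pro")
prompt = PromptTemplate(
    input_variables=["extracted_text", "provided_docs", "chat_history", "human_input"],
    template=template,
)
llm_chain = LLMChain(llm=llm, prompt=prompt)

# Every template variable is supplied as a keyword argument to predict():
print(llm_chain.predict(extracted_text="", provided_docs="",
                        chat_history="", human_input="Hello!"))
```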
@@ -46,12 +46,14 @@ llm_chain = LLMChain(
 
 previous_response = ""
 provided_docs = ""
+extracted_text = ""
 def conversational_chat(query):
-    global previous_response, provided_docs
-    previous_response = "".join([f"Human: {i[0]}\nChatbot: {i[1]}" for i in st.session_state['history'] if i is not None])
-    provided_docs = "".join([doc for doc in st.session_state["docs"] if doc is not None])
-    extracted_text = "".join([text for text in st.session_state["extracted_text"] if text is not None])
-    print(extracted_text)
+    global previous_response, provided_docs,extracted_text
+    for i in st.session_state['history']:
+        if i is not None:
+            previous_response += f"Human: {i[0]}\n Chatbot: {i[1]}\n"
+    provided_docs = "".join(st.session_state["docs"])
+    extracted_text = "".join(st.session_state["extracted_text"])
 
     result = llm_chain.predict(
         chat_history=previous_response,
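
The rewritten loop serializes `st.session_state['history']`, a list of `(human, bot)` pairs, into a single transcript string for the `chat_history` slot. A small standalone sketch of the same serialization (`build_chat_history` is a hypothetical helper name, not part of app.py):

```python
# Standalone sketch of the history serialization used above.
def build_chat_history(history):
    """Turn a list of (human, bot) pairs into one transcript string."""
    out = ""
    for turn in history:
        if turn is not None:
            out += f"Human: {turn[0]}\n Chatbot: {turn[1]}\n"
    return out

print(build_chat_history([("Hi", "Hello!"), ("What is CRETA?", "Now I go by Chatto.")]))
# Human: Hi
#  Chatbot: Hello!
# Human: What is CRETA?
#  Chatbot: Now I go by Chatto.
```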
@@ -63,19 +65,12 @@ def conversational_chat(query):
     return result
 
 st.title("Chat Bot:")
-st.text("I am CRETA Your Friendly Assitant")
+st.text("I am Chatto Your Friendly Assitant")
 st.markdown("Built by [Suriya❤️](https://github.com/theSuriya)")
 
 if 'history' not in st.session_state:
     st.session_state['history'] = []
 
-# Initialize messages
-if 'generated' not in st.session_state:
-    st.session_state['generated'] = ["Hello ! Ask me anything"]
-
-if 'past' not in st.session_state:
-    st.session_state['past'] = [" "]
-
 if 'docs' not in st.session_state:
     st.session_state['docs'] = []
 
@@ -89,6 +84,10 @@ def get_pdf_text(pdf_docs):
         for page in pdf_reader.pages:
             text += page.extract_text()
     return text
+def response_streaming(text):
+    for i in text:
+        yield i
+        time.sleep(0.01)
 
 def get_url_text(url_link):
     try:
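
The new `response_streaming` helper yields one character at a time, which pairs with the `st.write_stream` call added further down (available in Streamlit 1.31+; it returns the concatenated string once the generator is exhausted). A standalone usage sketch; note that `time` has to be imported somewhere in app.py for the added lines to run:

```python
# Standalone sketch of the character-streaming pattern; requires streamlit >= 1.31.
import time
import streamlit as st

def response_streaming(text):
    for ch in text:       # emit one character per step
        yield ch
        time.sleep(0.01)  # short pause gives a typing effect

full_text = st.write_stream(response_streaming("Hello from Chatto!"))
# write_stream renders the characters incrementally and returns the full string.
```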
@@ -105,9 +104,9 @@ def get_url_text(url_link):
 
 
 with st.sidebar:
-    st.title("Add a file for CRETA memory:")
+    st.title("Add a file for Chatto memory:")
     uploaded_files = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True)
-    uploaded_url = st.text_area("Please upload a URL:")
+    uploaded_url = st.text_input("Please upload a URL:")
 
     if st.button("Submit & Process"):
         if uploaded_files or uploaded_url:
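
The uploader feeds `get_pdf_text`, whose visible context lines (`pdf_reader.pages`, `page.extract_text()`) match the PyPDF2/pypdf `PdfReader` API. A hedged sketch of such a helper for uploaded files; the exact body isn't shown in this diff, so treat it as illustrative:

```python
# Illustrative sketch of a get_pdf_text helper consistent with the context
# lines above; app.py's actual implementation is only partially visible here.
from PyPDF2 import PdfReader

def get_pdf_text(pdf_docs):
    text = ""
    for pdf in pdf_docs:                  # each item is an uploaded file object
        pdf_reader = PdfReader(pdf)
        for page in pdf_reader.pages:
            text += page.extract_text()
    return text
```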
@@ -124,24 +123,22 @@ with st.sidebar:
         else:
             st.error("Please upload at least one PDF file or provide a URL.")
 
-# Create containers for chat history and user input
-response_container = st.container()
-container = st.container()
-
-# User input form
-user_input = st.chat_input("Ask Your Questions 👉..")
-with container:
-    if user_input:
-        output = conversational_chat(user_input)
-        # answer = response_generator(output)
-        st.session_state['past'].append(user_input)
-        st.session_state['generated'].append(output)
-
+if 'messages' not in st.session_state:
+    st.session_state.messages = [{'role': 'assistant', "content": "I'm Here to help you questions"}]
+
+for message in st.session_state.messages:
+    with st.chat_message(message['role']):
+        st.write(message['content'])
 
-# Display chat history
-if st.session_state['generated']:
-    with response_container:
-        for i in range(len(st.session_state['generated'])):
-            if i != 0:
-                message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
-                message(st.session_state["generated"][i], key=str(i), avatar_style="bottts")
+user_input = st.chat_input("Ask Your Questions 👉..")
+if user_input:
+    st.session_state.messages.append({'role': 'user', "content": user_input})
+    with st.chat_message("user"):
+        st.write(user_input)
+    response = conversational_chat(user_input)
+    # stream = response_streaming(response)
+
+    with st.chat_message("assistant"):
+        full_response = st.write_stream(response_streaming(response))
+        message = {"role": "assistant", "content": response}
+        st.session_state.messages.append(message)
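
Taken together, the replacement follows Streamlit's native chat pattern: a message list kept in `st.session_state`, replayed with `st.chat_message` on every rerun, with `st.chat_input` driving each new turn. A minimal self-contained sketch of that pattern, with a placeholder echo standing in for `conversational_chat`:

```python
# Minimal sketch of the st.chat_message / st.chat_input pattern used above;
# the echo reply is a placeholder for the app's conversational_chat call.
import streamlit as st

if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "assistant", "content": "Ask me anything"}]

for msg in st.session_state.messages:          # replay history on every rerun
    with st.chat_message(msg["role"]):
        st.write(msg["content"])

if user_input := st.chat_input("Ask Your Questions 👉.."):
    st.session_state.messages.append({"role": "user", "content": user_input})
    with st.chat_message("user"):
        st.write(user_input)
    reply = f"You said: {user_input}"          # placeholder reply
    with st.chat_message("assistant"):
        st.write(reply)
    st.session_state.messages.append({"role": "assistant", "content": reply})
```

Since `st.write_stream` returns the streamed text, the assistant message in app.py could equally be stored from `full_response` instead of `response`; with this generator the two hold the same content.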