mlsc-tiet committed on
Commit 60084ee
1 Parent(s): fa39fd0

Update app.py

Files changed (1)
  1. app.py +29 -16
app.py CHANGED
@@ -54,19 +54,29 @@ embeddings = model_norm
 db = FAISS.load_local(db_path,embeddings)
 print('Prompt Chain')

-custom_prompt_template = """You are a helpful bot designed for the website of MLSC TIET, the Microsoft Learn Student Chapter at TIET, a technical society. Your task is to answer queries about MLSC, and every answer you give should stay in the context of MLSC. If a question falls outside that context, decline it by saying 'It is out of context'. If you don't know the answer, don't make one up; politely decline the question instead. You may extrapolate a little to be more informative, but don't sound boastful or exaggerate, and don't bring in anything from outside the context of the document. Don't answer questions about specific persons; if a question asks for the names of MLSC position holders, give a general description of the role instead.
-You can accept some basic greetings to interact with the user, but be sure to remain in the context of MLSC only.
-Context: {context}
-Question: {question}
+
+custom_prompt_template = """You are a helpful bot designed for the website of MLSC TIET, the Microsoft Learn Student Chapter at TIET, a technical society. Your task is to answer queries about MLSC, and every answer you give should stay in the context of MLSC. If a question falls outside that context, decline it by saying 'Sorry, I cannot help with that'. If you don't know the answer, don't make one up; politely decline the question instead. You may extrapolate a little to be more informative, but don't sound boastful or exaggerate, and don't bring in anything from outside the context of the document. Don't answer questions about specific persons; if a question asks for the names of MLSC position holders, give a general description of the role instead.
+You can accept some basic greetings to interact with the user, but be sure to remain in the context of MLSC only.
+Use the following context (delimited by <ctx></ctx>) and the chat history (delimited by <hs></hs>) to answer the question:
+------
+<ctx>
+{context}
+</ctx>
+------
+<hs>
+{history}
+</hs>
+------
+{question}

 Only return the helpful answer below and nothing else.
 Helpful answer:
 """

 prompt = PromptTemplate(template=custom_prompt_template,
-    input_variables=['context', 'question'])
+    input_variables=['history', 'context', 'question'])

-memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
+# memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)



@@ -86,21 +96,24 @@ qa_chain = RetrievalQA.from_chain_type(llm=llm2,
     chain_type='stuff',
     retriever=db.as_retriever(search_kwargs={'k': 5}),
     return_source_documents=False,
-    chain_type_kwargs={'prompt': prompt})
+    chain_type_kwargs={'prompt': prompt,
+                       'memory': ConversationBufferMemory(
+                           memory_key="history",
+                           input_key="question")})
 history_df = pd.DataFrame(columns = ['Question','Answer'])
 def qa_bot(query):
-    global history_df
     response = qa_chain({'query': query})
-    print(response)
-    response_df = pd.DataFrame.from_dict([response])
-    response_df.rename(columns = {'query' : 'Question','result' : 'Answer'},inplace = True)
-    history_df = pd.concat([history_df,response_df])
-    history_df.reset_index(drop = True,inplace = True)
-    history_df.to_csv('./responses.csv')
-    print(history_df)
     return (response['result'])

-st.title("MLSCBot")
+st.markdown("<h1 style='text-align: center; color: white; font: Charcuterie'>MLSCBot</h1>", unsafe_allow_html=True)
+hide_streamlit_style = """
+<style>
+#MainMenu {visibility: hidden;}
+header {visibility: hidden;}
+footer {visibility: hidden;}
+</style>
+"""
+st.markdown(hide_streamlit_style, unsafe_allow_html=True)
 st.image('./banner.png',use_column_width=True)
 if "messages" not in st.session_state:
     st.session_state.messages = []
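
The functional change in this commit is that RetrievalQA now receives a ConversationBufferMemory through chain_type_kwargs, with memory_key="history" matching the new {history} placeholder in the prompt and input_key="question" telling the memory which chain input to record as the user turn. Below is a minimal, self-contained sketch of that pattern, assuming a classic (pre-0.1) LangChain release; FakeListLLM, FakeEmbeddings, and the one-line toy document are stand-ins for the app's real llm2 and FAISS index, not part of this commit.

# Sketch only: reproduces the prompt + memory wiring from the commit with
# fake components (assumes classic LangChain and faiss-cpu are installed).
from langchain.llms.fake import FakeListLLM
from langchain.embeddings import FakeEmbeddings
from langchain.vectorstores import FAISS
from langchain.prompts import PromptTemplate
from langchain.memory import ConversationBufferMemory
from langchain.chains import RetrievalQA

template = """Answer only from the context below.
<ctx>{context}</ctx>
<hs>{history}</hs>
Question: {question}
Helpful answer:"""

prompt = PromptTemplate(template=template,
                        input_variables=['history', 'context', 'question'])

# Tiny in-memory index standing in for FAISS.load_local(db_path, embeddings).
db = FAISS.from_texts(["MLSC TIET is a student technical society."],
                      FakeEmbeddings(size=32))

qa_chain = RetrievalQA.from_chain_type(
    llm=FakeListLLM(responses=["It is a student technical society.",
                               "It runs workshops and technical events."]),
    chain_type='stuff',
    retriever=db.as_retriever(search_kwargs={'k': 1}),
    return_source_documents=False,
    chain_type_kwargs={
        'prompt': prompt,
        # memory_key must match the {history} placeholder in the prompt;
        # input_key names the chain input saved as the human turn.
        'memory': ConversationBufferMemory(memory_key="history",
                                           input_key="question"),
    })

print(qa_chain({'query': "What is MLSC TIET?"})['result'])
# On the second call the <hs> block is no longer empty.
print(qa_chain({'query': "What does it organise?"})['result'])

Passing the memory inside chain_type_kwargs attaches it to the inner stuff chain rather than to RetrievalQA itself, which is why the standalone ConversationBufferMemory created earlier could be commented out.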
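
The remaining additions are cosmetic: the st.title call is replaced by a centred st.markdown heading, and an injected CSS block hides Streamlit's default menu, header, and footer. The chat loop that consumes st.session_state.messages lies outside this hunk; a hypothetical continuation, assuming the qa_bot defined above and a Streamlit version with st.chat_message / st.chat_input (1.24 or later), could look like this:

# Hypothetical continuation (not part of this commit): replay stored turns,
# then route new input through qa_bot and append both sides to the history.
import streamlit as st

for msg in st.session_state.messages:  # history survives Streamlit reruns
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

if query := st.chat_input("Ask something about MLSC"):
    st.session_state.messages.append({"role": "user", "content": query})
    with st.chat_message("user"):
        st.markdown(query)
    answer = qa_bot(query)
    st.session_state.messages.append({"role": "assistant", "content": answer})
    with st.chat_message("assistant"):
        st.markdown(answer)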