mlsc-tiet committed on
Commit
3c54435
1 Parent(s): 6a0bdf3

Create app.py

Files changed (1)
  1. app.py +123 -0
app.py ADDED
@@ -0,0 +1,123 @@
+ import pandas as pd
+ import streamlit as st
+ import google.generativeai as palm
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain.embeddings import HuggingFaceEmbeddings
+ from langchain.embeddings import GooglePalmEmbeddings
+ from langchain.llms import GooglePalm
+ from langchain.document_loaders import PyPDFLoader, DirectoryLoader
+ # from langchain.llms import CTransformers
+ from langchain.vectorstores import FAISS
+ from langchain.chains import RetrievalQA, ConversationalRetrievalChain
+ from langchain.prompts import PromptTemplate
+ from langchain.cache import InMemoryCache
+ from langchain.llms import VLLM
+ from langchain.memory.buffer import ConversationBufferMemory
+ from langchain.chains.conversation.memory import ConversationSummaryBufferMemory
+ import gradio as gr
+ import requests
+ import os
+ from langchain.embeddings import HuggingFaceBgeEmbeddings
+ # Read the Google PaLM API key from the environment (set it as a secret rather than hard-coding it here).
+ os.environ.setdefault('GOOGLE_API_KEY', '<your-google-api-key>')
+ 
+ # models = [m for m in palm.list_models() if "generateText" in m.supported_generation_methods]
+ # model = models[0].name
+ # print('Imports Done')
+ 
+ 
+ db_path = './vectordb/db_faiss'
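+ # db_path is assumed to point at a FAISS index that was built ahead of time (see the
+ # commented-out ingestion steps below) and shipped alongside the app.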
+ 
+ # print('Reading Document')
+ # os.mkdir('/home/Sparsh/data')
+ # url = 'https://ia803106.us.archive.org/13/items/Encyclopedia_Of_Agriculture_And_Food_Systems/Encyclopedia%20of%20Agriculture%20and%20Food%20Systems.pdf'
+ # response = requests.get(url)
+ # with open('/home/Sparsh/data/document.pdf', 'wb') as f:
+ #     f.write(response.content)
+ #
+ # print('Creating Chunks')
+ 
+ 
+ # loader = DirectoryLoader('C:/Users/HP/PycharmProjects/MLSCBot/venv/MLSCBot', glob="*.pdf", loader_cls=PyPDFLoader)
+ # data = loader.load()
+ # splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=100)
+ # chunks = splitter.split_documents(data)
+ #
+ # print('Mapping Embeddings')
+ model_name = "BAAI/bge-base-en"
+ encode_kwargs = {'normalize_embeddings': True}  # set True to compute cosine similarity
+ 
+ model_norm = HuggingFaceBgeEmbeddings(model_name=model_name,
+                                       encode_kwargs=encode_kwargs)
+ embeddings = model_norm
+ # db = FAISS.from_documents(chunks, embeddings)
+ # db.save_local(db_path)
+ db = FAISS.load_local(db_path, embeddings)
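+ # Note: newer LangChain releases may additionally require allow_dangerous_deserialization=True
+ # when loading a pickled FAISS index; the call above assumes the older API this app was written against.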
+ print('Prompt Chain')
+ 
+ custom_prompt_template = """You are a helpful bot designed for MLSC TIET (Microsoft Learn Student Chapter, TIET), a technical society, and you answer queries about MLSC for its website. Every answer you provide should be in the context of MLSC; if a question is not in that context, decline it by saying 'It is out of context'. If you don't know the answer, don't try to make it up; just politely decline the question. You may extrapolate a little to be more informative, but don't sound boastful or exaggerated, and don't say anything outside the context of the document. Don't answer questions that pertain to any specific person; if a question demands the names of MLSC position holders, give a general description of the role instead.
+ You may accept basic greetings to interact with the user, but be sure to remain in the context of MLSC only.
+ Context: {context}
+ Question: {question}
+ 
+ Only return the helpful answer below and nothing else.
+ Helpful answer:
+ """
+ 
+ prompt = PromptTemplate(template=custom_prompt_template,
+                         input_variables=['context', 'question'])
+ 
+ memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
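+ # NOTE: this memory is only wired into the commented-out ConversationalRetrievalChain further down;
+ # the active RetrievalQA chain below does not use it.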
+ 
+ 
+ 
+ print('Creating LLM')
+ 
+ llm2 = GooglePalm(
+     max_output_tokens=1024,  # GooglePalm expects max_output_tokens rather than max_new_tokens
+     top_k=10,
+     top_p=0.5,
+     temperature=0.5)
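+ 
+ # Quick sanity check that the PaLM backend responds; safe to remove once the app is stable.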
+ print(llm2("What is the capital of France ?"))
+ # qa_chain = ConversationalRetrievalChain.from_llm(llm2, retriever=db.as_retriever(search_kwargs={'k': 2}),
+ #                                                  return_source_documents=False,
+ #                                                  memory=memory)
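+ # The active chain: the 'stuff' chain type simply concatenates the top-5 retrieved chunks into the prompt defined above.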
+ qa_chain = RetrievalQA.from_chain_type(llm=llm2,
+                                        chain_type='stuff',
+                                        retriever=db.as_retriever(search_kwargs={'k': 5}),
+                                        return_source_documents=False,
+                                        chain_type_kwargs={'prompt': prompt})
+ history_df = pd.DataFrame(columns=['Question', 'Answer'])
+ 
+ def qa_bot(query):
+     response = qa_chain({'query': query})
+     return response['result']
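+ # e.g. qa_bot("What is MLSC?") returns the chain's answer string, which the Streamlit UI below renders.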
+ 
+ st.title("MLSCBot")
+ st.image('./banner.png', use_column_width=True)
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+ 
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+ 
+ # `user_query` avoids shadowing the PromptTemplate assigned to `prompt` above.
+ if user_query := st.chat_input("Hello! How can I help you?"):
+     st.session_state.messages.append({"role": "user", "content": user_query})
+     with st.chat_message("user"):
+         st.markdown(user_query)
+     with st.chat_message("assistant"):
+         message_placeholder = st.empty()
+         full_response = qa_bot(user_query)
+         message_placeholder.markdown(full_response + "▌")
+         message_placeholder.markdown(full_response)
+     st.session_state.messages.append({"role": "assistant", "content": full_response})
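+ # The Gradio front end below is an alternative UI left commented out in favour of the Streamlit chat above.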
+ # with gr.Blocks(theme='upsatwal/mlsc_tiet') as demo:
+ #     title = gr.HTML("<h1>MLSCBot</h1>")
+ #     with gr.Row():
+ #         img = gr.Image('C:/Users/HP/Downlo0ads/banner.png', label='MLSC Logo', show_label=False, elem_id='image', height=200)
+ #     input = gr.Textbox(label="How can I assist you?")  # Textbox for user input
+ #     output = gr.Textbox(label="Here you go:")  # Textbox for chatbot response
+ #     btn = gr.Button(value="Answer", elem_classes="button-chatbot", variant="primary")  # Button to trigger the agent call
+ #     btn.click(fn=qa_bot, inputs=input, outputs=output)
+ # demo.launch(share=True, debug=True, show_api=False, show_error=False)
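+ # To run locally (assuming the FAISS index under ./vectordb/db_faiss and banner.png are present):
+ #     streamlit run app.py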