kaushik-anand committed on
Commit
d195bc3
1 Parent(s): 0f39610

Upload 2 files

Files changed (2)
  1. app.py +145 -0
  2. requirements.txt +202 -0
app.py ADDED
@@ -0,0 +1,145 @@
+ from dataclasses import dataclass
+ from typing import Literal
+ import streamlit as st
+ import os
+ from llamaapi import LlamaAPI
+ from langchain_experimental.llms import ChatLlamaAPI
+ from langchain.embeddings import HuggingFaceEmbeddings
+ import pinecone
+ from langchain.vectorstores import Pinecone
+ from langchain.prompts import PromptTemplate
+ from langchain.chains import RetrievalQA
+ import streamlit.components.v1 as components
+ from langchain_groq import ChatGroq
+ from langchain.chains import ConversationalRetrievalChain
+ from langchain.memory import ChatMessageHistory, ConversationBufferMemory
+ import time
+
+ HUGGINGFACEHUB_API_TOKEN = st.secrets['HUGGINGFACEHUB_API_TOKEN']
+
+ @dataclass
+ class Message:
+     """Class for keeping track of a chat message."""
+     origin: Literal["👤 Human", "👨🏻‍⚖️ AI"]
+     message: str
+
+
+ def download_hugging_face_embeddings():
+     embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')
+     return embeddings
+
+
+ def initialize_session_state():
+     if "history" not in st.session_state:
+         st.session_state.history = []
+     if "conversation" not in st.session_state:
+         llama = LlamaAPI(st.secrets["LlamaAPI"])
+         model = ChatLlamaAPI(client=llama)
+         chat = ChatGroq(temperature=0.5, groq_api_key=st.secrets["Groq_api"], model_name="mixtral-8x7b-32768")
+
+         embeddings = download_hugging_face_embeddings()
+
+         # Initialize the Pinecone client
+         pinecone.init(
+             api_key=st.secrets["PINECONE_API_KEY"],  # find at app.pinecone.io
+             environment=st.secrets["PINECONE_API_ENV"]  # next to the API key in the console
+         )
+         index_name = "legal-advisor"  # name of your Pinecone index
+
+         docsearch = Pinecone.from_existing_index(index_name, embeddings)
+
+         prompt_template = """
+         You are a trained bot that guides people on Indian law. Answer the user's query using your knowledge and the context provided.
+         If a question does not make sense or is not factually coherent, explain why instead of giving an incorrect answer. If you don't know the answer to a question, do not share false information.
+         Do not say thank you, do not state that you are an AI assistant, and be open about everything.
+         Use the following pieces of context to answer the user's question.
+         Context: {context}
+         Question: {question}
+         Only return the helpful answer below and nothing else.
+         Helpful answer:
+         """
+
+         PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
+
+         # chain_type_kwargs = {"prompt": PROMPT}
+         message_history = ChatMessageHistory()
+         memory = ConversationBufferMemory(
+             memory_key="chat_history",
+             output_key="answer",
+             chat_memory=message_history,
+             return_messages=True,
+         )
+         retrieval_chain = ConversationalRetrievalChain.from_llm(
+             llm=chat,
+             chain_type="stuff",
+             retriever=docsearch.as_retriever(search_kwargs={'k': 2}),
+             return_source_documents=True,
+             combine_docs_chain_kwargs={"prompt": PROMPT},
+             memory=memory,
+         )
+
+         st.session_state.conversation = retrieval_chain
+
+
+ def on_click_callback():
+     human_prompt = st.session_state.human_prompt
+     st.session_state.human_prompt = ""
+     response = st.session_state.conversation(
+         human_prompt
+     )
+     llm_response = response['answer']
+     st.session_state.history.append(
+         Message("👤 Human", human_prompt)
+     )
+     st.session_state.history.append(
+         Message("👨🏻‍⚖️ AI", llm_response)
+     )
+
+
+ initialize_session_state()
+
+ st.title("LegalEase Advisor Chatbot 🇮🇳")
+
+ st.markdown(
+     """
+     👋 **Namaste! Welcome to LegalEase Advisor!**
+     I'm here to assist you with your legal queries within the framework of Indian law, whether you're navigating a specific legal issue or seeking general guidance.
+
+     📚 **How I Can Assist:**
+
+     - Answer questions on various aspects of Indian law.
+     - Guide you through legal processes relevant to India.
+     - Provide information on your rights and responsibilities under Indian legal standards.
+
+     ⚖️ **Disclaimer:**
+
+     While I can provide general information, it's essential to consult a qualified Indian attorney for advice tailored to your specific situation.
+
+     🤖 **Getting Started:**
+
+     Feel free to ask any legal question related to Indian law, using keywords like "property rights," "labor laws," or "family law." I'm here to assist you!
+
+     Let's get started! How can I assist you today?
+     """
+ )
+
+ chat_placeholder = st.container()
+ prompt_placeholder = st.form("chat-form")
+
+ with chat_placeholder:
+     for chat in st.session_state.history:
+         st.markdown(f"{chat.origin}: {chat.message}")
+
+ with prompt_placeholder:
+     st.markdown("**Chat**")
+     cols = st.columns((6, 1))
+     cols[0].text_input(
+         "Chat",
+         label_visibility="collapsed",
+         key="human_prompt",
+     )
+     cols[1].form_submit_button(
+         "Submit",
+         type="primary",
+         on_click=on_click_callback,
+     )
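
Note that app.py reads every credential from Streamlit secrets. As a quick pre-deployment check, a minimal sketch like the one below (a hypothetical helper, not part of this commit) verifies that each key the app references is present; the key names are copied directly from the code above.

# Sanity-check sketch (hypothetical, not part of the committed files):
# confirms every secret referenced by app.py exists before the chain is built.
import streamlit as st

REQUIRED_SECRETS = [
    "HUGGINGFACEHUB_API_TOKEN",  # Hugging Face token for the embeddings
    "LlamaAPI",                  # LlamaAPI key
    "Groq_api",                  # Groq key used by ChatGroq
    "PINECONE_API_KEY",          # Pinecone API key
    "PINECONE_API_ENV",          # Pinecone environment name
]

missing = [name for name in REQUIRED_SECRETS if name not in st.secrets]
if missing:
    st.error(f"Missing Streamlit secrets: {', '.join(missing)}")
    st.stop()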
requirements.txt ADDED
@@ -0,0 +1,202 @@
+ aiofiles==23.2.1
+ aiohttp==3.9.0
+ aiosignal==1.3.1
+ altair==5.1.2
+ annotated-types==0.6.0
+ anyio==3.7.1
+ argon2-cffi==23.1.0
+ argon2-cffi-bindings==21.2.0
+ arrow==1.3.0
+ asttokens==2.4.1
+ async-lru==2.0.4
+ asyncer==0.0.2
+ attrs==23.1.0
+ Babel==2.13.1
+ backoff==2.2.1
+ beautifulsoup4==4.12.2
+ bidict==0.22.1
+ bleach==6.1.0
+ blinker==1.7.0
+ cachetools==5.3.2
+ certifi==2023.11.17
+ cffi==1.16.0
+ charset-normalizer==3.3.2
+ click==8.1.7
+ colorama==0.4.6
+ comm==0.2.0
+ dataclasses-json==0.5.14
+ debugpy==1.8.0
+ decorator==5.1.1
+ defusedxml==0.7.1
+ Deprecated==1.2.14
+ dnspython==2.4.2
+ executing==2.0.1
+ faiss-cpu==1.7.4
+ fastapi==0.100.1
+ fastapi-socketio==0.0.10
+ fastjsonschema==2.19.0
+ filelock==3.13.1
+ filetype==1.2.0
+ fqdn==1.5.1
+ frozenlist==1.4.0
+ fsspec==2023.10.0
+ gitdb==4.0.11
+ GitPython==3.1.40
+ googleapis-common-protos==1.61.0
+ greenlet==3.0.1
+ grpcio==1.59.3
+ h11==0.14.0
+ httpcore==0.17.3
+ httpx==0.24.1
+ huggingface-hub==0.19.4
+ idna==3.4
+ importlib-metadata==6.8.0
+ ipykernel==6.26.0
+ ipython==8.17.2
+ ipywidgets==8.1.1
+ isoduration==20.11.0
+ jedi==0.19.1
+ Jinja2==3.1.2
+ joblib==1.3.2
+ json5==0.9.14
+ jsonpatch==1.33
+ jsonpointer==2.4
+ jsonschema==4.20.0
+ jsonschema-specifications==2023.11.1
+ jupyter-events==0.9.0
+ jupyter-lsp==2.2.0
+ jupyter_client==8.6.0
+ jupyter_core==5.5.0
+ jupyter_server==2.10.1
+ jupyter_server_terminals==0.4.4
+ jupyterlab==4.0.8
+ jupyterlab-pygments==0.2.2
+ jupyterlab-widgets==3.0.9
+ jupyterlab_server==2.25.1
+ langchain
+ langchain-experimental
+ langsmith
+ Lazify==0.4.0
+ llamaapi
+ loguru==0.7.2
+ markdown-it-py==3.0.0
+ MarkupSafe==2.1.3
+ marshmallow==3.20.1
+ matplotlib-inline==0.1.6
+ mdurl==0.1.2
+ mistune==3.0.2
+ mpmath==1.3.0
+ multidict==6.0.4
+ mypy-extensions==1.0.0
+ nbclient==0.9.0
+ nbconvert==7.11.0
+ nbformat==5.9.2
+ nest-asyncio==1.5.8
+ networkx==3.2.1
+ nltk==3.8.1
+ notebook==7.0.6
+ notebook_shim==0.2.3
+ numpy==1.26.2
+ opentelemetry-api==1.21.0
+ opentelemetry-exporter-otlp==1.21.0
+ opentelemetry-exporter-otlp-proto-common==1.21.0
+ opentelemetry-exporter-otlp-proto-grpc==1.21.0
+ opentelemetry-exporter-otlp-proto-http==1.21.0
+ opentelemetry-instrumentation==0.42b0
+ opentelemetry-proto==1.21.0
+ opentelemetry-sdk==1.21.0
+ opentelemetry-semantic-conventions==0.42b0
+ overrides==7.4.0
+ packaging==23.2
+ pandas==2.1.3
+ pandocfilters==1.5.0
+ parso==0.8.3
+ Pillow==10.1.0
+ pinecone-client==2.2.4
+ platformdirs==4.0.0
+ prometheus-client==0.18.0
+ prompt-toolkit==3.0.41
+ protobuf==4.25.1
+ psutil==5.9.6
+ pure-eval==0.2.2
+ pyarrow==14.0.1
+ pycparser==2.21
+ pydantic==2.5.1
+ pydantic_core==2.14.3
+ pydeck==0.8.1b0
+ Pygments==2.16.1
+ PyJWT==2.8.0
+ pypdf==3.17.1
+ pypdfium2==4.24.0
+ python-dateutil==2.8.2
+ python-dotenv==1.0.0
+ python-engineio==4.8.0
+ python-graphql-client==0.4.3
+ python-json-logger==2.0.7
+ python-multipart==0.0.6
+ python-socketio==5.10.0
+ pytz==2023.3.post1
+ PyYAML==6.0.1
+ pyzmq==25.1.1
+ referencing==0.31.0
+ regex==2023.10.3
+ requests==2.31.0
+ rfc3339-validator==0.1.4
+ rfc3986-validator==0.1.1
+ rich==13.7.0
+ rpds-py==0.13.0
+ safetensors==0.4.0
+ scikit-learn==1.3.2
+ scipy==1.11.4
+ Send2Trash==1.8.2
+ sentence-transformers==2.2.2
+ sentencepiece==0.1.99
+ simple-websocket==1.0.0
+ six==1.16.0
+ smmap==5.0.1
+ sniffio==1.3.0
+ soupsieve==2.5
+ SQLAlchemy==2.0.23
+ stack-data==0.6.3
+ starlette==0.27.0
+ streamlit==1.28.2
+ sympy==1.12
+ syncer==2.0.3
+ tenacity==8.2.3
+ terminado==0.18.0
+ threadpoolctl==3.2.0
+ tinycss2==1.2.1
+ tokenizers==0.15.0
+ toml==0.10.2
+ tomli==2.0.1
+ toolz==0.12.0
+ torch==2.1.1
+ torchvision==0.16.1
+ tornado==6.3.3
+ tqdm==4.66.1
+ traitlets==5.13.0
+ transformers==4.35.2
+ types-python-dateutil==2.8.19.14
+ typing-inspect==0.9.0
+ typing_extensions==4.8.0
+ tzdata==2023.3
+ tzlocal==5.2
+ uptrace==1.21.0
+ uri-template==1.3.0
+ urllib3==2.1.0
+ uvicorn==0.23.2
+ validators==0.22.0
+ watchdog==3.0.0
+ watchfiles==0.20.0
+ wcwidth==0.2.10
+ webcolors==1.13
+ webencodings==0.5.1
+ websocket-client==1.6.4
+ websockets==12.0
+ widgetsnbextension==4.0.9
+ win32-setctime==1.1.0
+ wrapt==1.16.0
+ wsproto==1.2.0
+ yarl==1.9.2
+ zipp==3.17.0
+ langchain-groq
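
One pin worth noting: pinecone-client is held at 2.2.4, which still exposes the pinecone.init(...) call used in app.py; newer major versions of the client replace it with a Pinecone class, so upgrading the pin would break the code as committed. After pip install -r requirements.txt, a quick import check along these lines (a sketch, not part of the commit) confirms the core packages the app relies on are resolvable:

# Import sanity check (sketch): the modules app.py imports, plus
# sentence_transformers, which HuggingFaceEmbeddings pulls in at runtime.
import importlib

for module in (
    "streamlit",
    "pinecone",
    "llamaapi",
    "langchain",
    "langchain_experimental",
    "langchain_groq",
    "sentence_transformers",
):
    importlib.import_module(module)
    print(f"ok: {module}")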