annas4421 committed on
Commit
648a7c5
1 Parent(s): 1709c69

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -17
app.py CHANGED
@@ -5,14 +5,36 @@ from langchain_community.vectorstores import FAISS
5
  import os
6
  from langchain.prompts import PromptTemplate
7
  from langchain_together import Together
8
- import os
 
 
9
  from langchain.memory import ConversationBufferWindowMemory
10
  from langchain.chains import ConversationalRetrievalChain
11
  import streamlit as st
12
  import time
13
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
 
15
- embeddings = HuggingFaceEmbeddings(model_name="nomic-ai/nomic-embed-text-v1",model_kwargs={"trust_remote_code":True,"revision":"289f532e14dbbbd5a04753fa58739e9ba766f3c7"})
 
16
  #vectordb = Chroma.from_documents(texts, embedding=embeddings, persist_directory="./data")
17
  #db_retriever =vectordb.as_retriever(search_type="similarity",search_kwargs={'k':4})
18
 
@@ -62,29 +84,17 @@ if "messages" not in st.session_state:
62
  if "memory" not in st.session_state:
63
  st.session_state.memory = ConversationBufferWindowMemory(k=2, memory_key="chat_history",return_messages=True)
64
 
65
- embeddings = HuggingFaceEmbeddings(model_name="nomic-ai/nomic-embed-text-v1",model_kwargs={"trust_remote_code":True,"revision":"289f532e14dbbbd5a04753fa58739e9ba766f3c7"})
66
  #db=FAISS.load_local("/content/ipc_vector_db", embeddings, allow_dangerous_deserialization=True)
67
 
68
- prompt_template = """<s>[INST]This is a chat template and As a legal chat bot specializing in pakistan Penal Code queries and , your primary objective is to provide accurate and concise information based on the user's questions. Do not generate your own questions and answers. You will adhere strictly to the instructions provided, offering relevant context from the knowledge base while avoiding unnecessary details. Your responses will be brief, to the point, and in compliance with the established format. If a question falls outside the given context, you will refrain from utilizing the chat history and instead rely on your own knowledge base to generate an appropriate response. You will prioritize the user's query and refrain from posing additional questions. The aim is to deliver professional, precise, and contextually relevant information pertaining to the Indian Penal Code.
69
- CONTEXT: {context}
70
- CHAT HISTORY: {chat_history}
71
- QUESTION: {question}
72
- ANSWER:
73
- </s>[INST]
74
- """
75
 
76
- prompt = PromptTemplate(template=prompt_template,
77
  input_variables=['context', 'question', 'chat_history'])
78
 
79
  # You can also use other LLMs options from https://python.langchain.com/docs/integrations/llms. Here I have used TogetherAI API
80
 
81
  from config import together_api
82
- llm = Together(
83
- model="mistralai/Mistral-7B-Instruct-v0.2",
84
- temperature=0.5,
85
- max_tokens=1024,
86
- together_api_key=together_api
87
- )
88
  qa = ConversationalRetrievalChain.from_llm(
89
  llm=llm,
90
  memory=st.session_state.memory,
 
5
  import os
6
  from langchain.prompts import PromptTemplate
7
  from langchain_together import Together
8
+ from langchain.chat_models import ChatOpenAI
9
+ from htmlTemplates import css, bot_template, user_template
10
+ from langchain.embeddings import openai
11
  from langchain.memory import ConversationBufferWindowMemory
12
  from langchain.chains import ConversationalRetrievalChain
13
  import streamlit as st
14
  import time
15
 
16
+ from openai import OpenAI
17
+ api_key = os.getenv("OPENAI_API_KEY")
18
+ client = OpenAI(api_key=api_key)
19
+
20
+
21
+ # creating custom template to guide llm model
22
+ custom_template ="""<s>[INST]You will start the conversation by greeting the user and introducing yourself as qanoon-bot,\
23
+ stating your availability for legal assistance. Your next step will depend on the user's response.\
24
+ If the user expresses a need for legal assistance in Pakistan, you will ask them to describe their case or problem.\
25
+ After receiving the case or problem details from the user, you will provide the solutions and procedures according to the knowledge base and also give related penal codes and procedures. \
26
+ However, if the user does not require legal assistance in Pakistan, you will immediately thank them and\
27
+ say goodbye, ending the conversation. Remember to base your responses on the user's needs, providing accurate and\
28
+ concise information regarding the Pakistan legal law and rights where applicable. Your interactions should be professional and\
29
+ focused, ensuring the user's queries are addressed efficiently without deviating from the set flows.\
30
+ CHAT HISTORY: {chat_history}
31
+ QUESTION: {question}
32
+ ANSWER:
33
+ </s>[INST]
34
+ """
35
 
36
+ embeddings=OpenAIEmbeddings()
37
+ #embeddings = HuggingFaceEmbeddings(model_name="nomic-ai/nomic-embed-text-v1",model_kwargs={"trust_remote_code":True,"revision":"289f532e14dbbbd5a04753fa58739e9ba766f3c7"})
38
  #vectordb = Chroma.from_documents(texts, embedding=embeddings, persist_directory="./data")
39
  #db_retriever =vectordb.as_retriever(search_type="similarity",search_kwargs={'k':4})
40
 
 
84
  if "memory" not in st.session_state:
85
  st.session_state.memory = ConversationBufferWindowMemory(k=2, memory_key="chat_history",return_messages=True)
86
 
87
+ #embeddings = HuggingFaceEmbeddings(model_name="nomic-ai/nomic-embed-text-v1",model_kwargs={"trust_remote_code":True,"revision":"289f532e14dbbbd5a04753fa58739e9ba766f3c7"})
88
  #db=FAISS.load_local("/content/ipc_vector_db", embeddings, allow_dangerous_deserialization=True)
89
 
 
 
 
 
 
 
 
90
 
91
+ prompt = PromptTemplate(template=custom_template,
92
  input_variables=['context', 'question', 'chat_history'])
93
 
94
  # You can also use other LLMs options from https://python.langchain.com/docs/integrations/llms. Here I have used TogetherAI API
95
 
96
  from config import together_api
97
+ llm=ChatOpenAI(temperature=0.2,model_name='gpt-3.5-turbo-0125')
 
 
 
 
 
98
  qa = ConversationalRetrievalChain.from_llm(
99
  llm=llm,
100
  memory=st.session_state.memory,