Umama-at-Bluchip committed: Update app.py
app.py CHANGED
@@ -24,7 +24,7 @@ embeddings = HuggingFaceEmbeddings(model_name="nomic-ai/nomic-embed-text-v1",mod
 db = FAISS.load_local("medchat_db", embeddings, allow_dangerous_deserialization=True)
 db_retriever = db.as_retriever(search_type="similarity",search_kwargs={"k": 4})
 
-custom_prompt_template = """Follow these instructions clearly. This is a chat template and you are a medical practitioner chat bot who provides correct medical information.
+custom_prompt_template = """Follow these instructions clearly. This is a chat template and you are a medical practitioner chat bot who provides correct medical information. Answer the user's question by utilizing the provided knowledge base. Use your own knowledge base and answer the question when the context is not related to the user's question.
 
 CONTEXT: {context}
 
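The only change in this hunk is the opening instruction of custom_prompt_template: the bot is now told to answer from the retrieved knowledge base and to fall back on its own knowledge when the context is unrelated to the question. The rest of the template and the PromptTemplate call sit outside the hunk; below is a minimal sketch of how they presumably fit together (the {question} placeholder and the input_variables list are assumptions, only `prompt = PromptTemplate(template=custom_prompt_template,` is visible in the next hunk header):

from langchain.prompts import PromptTemplate

# Sketch only: the continuation of the template below is assumed, not part of the diff.
custom_prompt_template = """Follow these instructions clearly. This is a chat template and you are a medical practitioner chat bot who provides correct medical information. Answer the user's question by utilizing the provided knowledge base. Use your own knowledge base and answer the question when the context is not related to the user's question.

CONTEXT: {context}

QUESTION: {question}"""  # hypothetical placeholder; not visible in the diff

# The combine-docs step of ConversationalRetrievalChain fills in {context} and {question}.
prompt = PromptTemplate(template=custom_prompt_template,
                        input_variables=["context", "question"])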
@@ -41,7 +41,7 @@ prompt = PromptTemplate(template=custom_prompt_template,
 
 llm = Together(
     model="mistralai/Mistral-7B-Instruct-v0.2",
-    temperature=0.
+    temperature=0.5,
     max_tokens=512,
     together_api_key="48515099b0ed4e22e56da54e50feb4adfaaa901a444b0c34bb33c66abe7b2c61"
 )
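This hunk fixes a syntax error: the old `temperature=0.` line has no trailing comma, so the Together(...) call cannot parse, and it also leaves the temperature unfinished; the new line sets it to 0.5. Separately, the Together API key is committed in plain text here; a common alternative (an assumption, not something this commit does) is to read it from an environment variable or Space secret:

import os
from langchain_community.llms import Together  # import path is an assumption; older code uses `from langchain.llms import Together`

llm = Together(
    model="mistralai/Mistral-7B-Instruct-v0.2",
    temperature=0.5,
    max_tokens=512,
    together_api_key=os.environ["TOGETHER_API_KEY"],  # hypothetical secret name; avoids hard-coding the key
)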
@@ -50,6 +50,7 @@ qa = ConversationalRetrievalChain.from_llm(
     llm=llm,
     memory=st.session_state.memory,
     retriever=db_retriever,
+    combine_docs_chain_kwargs={'prompt': prompt}
 )
 
 for message in st.session_state.messages:
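The added combine_docs_chain_kwargs={'prompt': prompt} line is what makes the new template take effect: without it, ConversationalRetrievalChain.from_llm builds its combine-docs step with the library's default QA prompt and the custom one is never used. Below is a sketch of how the updated pieces plug into the Streamlit app, building on llm, db_retriever, and prompt from the hunks above (the memory setup and the chat-input handling are assumptions; the diff only shows the start of the message loop):

import streamlit as st
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory

# Assumed memory setup; the diff only references st.session_state.memory.
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferMemory(
        memory_key="chat_history", return_messages=True, output_key="answer"
    )

qa = ConversationalRetrievalChain.from_llm(
    llm=llm,
    memory=st.session_state.memory,
    retriever=db_retriever,
    combine_docs_chain_kwargs={"prompt": prompt},  # new in this commit: routes the custom prompt into the combine-docs step
)

# Hypothetical chat handling; only `for message in st.session_state.messages:` is visible in the diff.
if question := st.chat_input("Ask a medical question"):
    result = qa.invoke({"question": question})
    st.chat_message("assistant").write(result["answer"])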