DanyaalMajid committed on
Commit
38a4055
1 Parent(s): 4dc8cac

Update Prompt Template

Files changed (1)
  1. app.py +2 -1
app.py CHANGED
@@ -1,6 +1,7 @@
 import time
 import streamlit as st
 from llama_index import ServiceContext, StorageContext, set_global_service_context, VectorStoreIndex, Document
+from llama_index.prompts import PromptTemplate
 from llama_index.embeddings import LangchainEmbedding
 from langchain.embeddings.huggingface import HuggingFaceEmbeddings
 from llama_index.chat_engine.condense_question import CondenseQuestionChatEngine
@@ -77,7 +78,7 @@ def main():
     index = (VectorStoreIndex.from_documents(
         documents, service_context=service_context, storage_context=storage_context, llm=llm))
     # chat_engine = index.as_chat_engine(chat_mode="simple", verbose=True)
-    custom_prompt = ""
+    custom_prompt = PromptTemplate("")
     query_engine = index.as_query_engine()
     chat_engine = CondenseQuestionChatEngine.from_defaults(
         query_engine=query_engine,
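
Note: after this commit, custom_prompt is still an empty PromptTemplate, and the hunk only shows the query_engine= argument of from_defaults. The following is a minimal sketch of how such a template is typically filled in and passed to CondenseQuestionChatEngine; the prompt wording and the condense_question_prompt wiring are assumptions, not part of this commit. The {chat_history} and {question} placeholders are the ones used by the engine's default condense prompt.

# A minimal sketch (not part of this commit) of filling in the template
# and passing it to the chat engine; wording and keyword usage are assumed.
from llama_index.prompts import PromptTemplate
from llama_index.chat_engine.condense_question import CondenseQuestionChatEngine

custom_prompt = PromptTemplate(
    "Given the following conversation and a follow-up question, "
    "rephrase the follow-up question as a standalone question.\n\n"
    "Chat history:\n{chat_history}\n\n"
    "Follow-up question: {question}\n\n"
    "Standalone question: "
)

# query_engine is assumed to be the one built from the index in app.py.
chat_engine = CondenseQuestionChatEngine.from_defaults(
    query_engine=query_engine,
    condense_question_prompt=custom_prompt,
    verbose=True,
)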