Ritesh-hf committed
Commit 82f99a0
Parent: c2423fb

Update app.py

Files changed (1)
app.py +8 -8
app.py CHANGED
@@ -67,7 +67,7 @@ def initialize_pinecone(index_name: str):
 
 # Initialize Pinecone index and BM25 encoder
 pinecone_index = initialize_pinecone("updated-uae-gov")
-bm25 = BM25Encoder().load("./updated-uae-gov.json")
+bm25 = BM25Encoder().load("./new-updated-uae-gov.json")
 
 ##################################################
 ##################################################
@@ -86,12 +86,12 @@ retriever = PineconeHybridSearchRetriever(
 llm = ChatPerplexity(temperature=0, pplx_api_key=GROQ_API_KEY, model="llama-3.1-sonar-large-128k-chat", max_tokens=512, max_retries=2)
 
 # Initialize Reranker
-# model = HuggingFaceCrossEncoder(model_name="BAAI/bge-reranker-base")
-# compressor = CrossEncoderReranker(model=model, top_n=10)
+model = HuggingFaceCrossEncoder(model_name="BAAI/bge-reranker-base")
+compressor = CrossEncoderReranker(model=model, top_n=10)
 
-# compression_retriever = ContextualCompressionRetriever(
-#     base_compressor=compressor, base_retriever=retriever
-# )
+compression_retriever = ContextualCompressionRetriever(
+    base_compressor=compressor, base_retriever=retriever
+)
 
 # Contextualization prompt and retriever
 contextualize_q_system_prompt = """Given a chat history and the latest user question \
@@ -106,11 +106,11 @@ contextualize_q_prompt = ChatPromptTemplate.from_messages(
         ("human", "{input}")
     ]
 )
-history_aware_retriever = create_history_aware_retriever(llm, retriever, contextualize_q_prompt)
+history_aware_retriever = create_history_aware_retriever(llm, compression_retriever, contextualize_q_prompt)
 
 # QA system prompt and chain
 qa_system_prompt = """ You are a highly skilled information retrieval assistant. Use the following context to answer questions effectively.
-If you don't know the answer, simply state that you don't know.
+If you don't know the answer, state that you don't know.
 
 YOUR ANSWER SHOULD BE IN '{language}' LANGUAGE.
 
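
The first hunk only swaps which BM25 weights file the encoder loads at startup. For context, here is a minimal sketch of how such a JSON file is usually produced with pinecone-text's fit/dump workflow; the corpus below is a hypothetical placeholder and is not part of this commit.

# Sketch: producing and reloading a BM25 weights file such as
# "./new-updated-uae-gov.json" with pinecone-text. `corpus` is a placeholder.
from pinecone_text.sparse import BM25Encoder

corpus = [
    "How to renew an Emirates ID",                       # placeholder documents
    "Visa services offered on UAE government portals",
]

bm25 = BM25Encoder()                       # default BM25 parameters
bm25.fit(corpus)                           # learn term statistics from the corpus
bm25.dump("./new-updated-uae-gov.json")    # persist the fitted encoder to JSON

# At startup, app.py reloads the fitted encoder instead of refitting it:
bm25 = BM25Encoder().load("./new-updated-uae-gov.json")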
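
The second hunk uncomments the cross-encoder reranker. Below is a hedged sketch of the resulting stack, assuming recent langchain / langchain-community import paths (these shift between releases); `retriever` is the PineconeHybridSearchRetriever already constructed earlier in app.py (see the @@ -86 hunk header).

# Reranking stack enabled by this commit.
from langchain_community.cross_encoders import HuggingFaceCrossEncoder
from langchain.retrievers.document_compressors import CrossEncoderReranker
from langchain.retrievers import ContextualCompressionRetriever

# The cross-encoder scores each (query, candidate document) pair directly,
# which is slower than the initial hybrid retrieval but more precise.
model = HuggingFaceCrossEncoder(model_name="BAAI/bge-reranker-base")
compressor = CrossEncoderReranker(model=model, top_n=10)   # keep the 10 best hits

# Wrap the base retriever so every query is reranked before reaching the LLM.
compression_retriever = ContextualCompressionRetriever(
    base_compressor=compressor, base_retriever=retriever
)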
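
The last hunk routes the reranked retriever into the history-aware retriever. Only create_history_aware_retriever appears in this diff; the QA-chain wiring below follows the standard LangChain conversational-RAG pattern and is an assumption about how the rest of app.py fits together, not a copy of it. llm, compression_retriever, contextualize_q_system_prompt and qa_system_prompt are the objects defined in app.py.

# Assumed downstream wiring: contextualize the question against chat history,
# retrieve + rerank, then answer with the QA prompt (which expects {language}).
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

contextualize_q_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", contextualize_q_system_prompt),   # defined in app.py
        MessagesPlaceholder("chat_history"),
        ("human", "{input}"),
    ]
)
history_aware_retriever = create_history_aware_retriever(
    llm, compression_retriever, contextualize_q_prompt
)

qa_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", qa_system_prompt),                # defined in app.py
        MessagesPlaceholder("chat_history"),
        ("human", "{input}"),
    ]
)
question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)

# Hypothetical invocation; "language" fills the {language} slot in the QA prompt.
response = rag_chain.invoke(
    {"input": "How do I renew my Emirates ID?", "chat_history": [], "language": "English"}
)
print(response["answer"])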