LOUIS SANNA committed on
Commit
93decd4
1 Parent(s): cde6d5c

feat(*): rename

Browse files
README.md CHANGED
@@ -13,6 +13,8 @@ pinned: false
13
 
14
  A clone of the amazing https://huggingface.co/spaces/Ekimetrics/climate-question-answering.
15
 
 
 
16
  ## Build vector index
17
 
18
  ```bash
 
13
 
14
  A clone of the amazing https://huggingface.co/spaces/Ekimetrics/climate-question-answering.
15
 
16
+ We abstracted the code so it's easy to build another tool based on another domain.
17
+
18
  ## Build vector index
19
 
20
  ```bash
{climateqa → anyqa}/__init__.py RENAMED
File without changes
{climateqa → anyqa}/build_index.py RENAMED
File without changes
{climateqa → anyqa}/chains.py RENAMED
@@ -7,8 +7,8 @@ from langchain.chains import QAWithSourcesChain
7
  from langchain.chains import TransformChain, SequentialChain
8
  from langchain.chains.qa_with_sources import load_qa_with_sources_chain
9
 
10
- from climateqa.prompts import answer_prompt, reformulation_prompt
11
- from climateqa.custom_retrieval_chain import CustomRetrievalQAWithSourcesChain
12
 
13
 
14
  def load_qa_chain_with_docs(llm):
@@ -20,7 +20,7 @@ def load_qa_chain_with_docs(llm):
20
  ```
21
  output = chain({
22
  "question":query,
23
- "audience":"experts climate scientists",
24
  "docs":docs,
25
  "language":"English",
26
  })
@@ -54,18 +54,18 @@ def load_qa_chain_with_text(llm):
54
  return qa_chain
55
 
56
 
57
- def load_climateqa_chain(retriever, llm_reformulation, llm_answer):
58
  reformulation_chain = load_reformulation_chain(llm_reformulation)
59
  answer_chain = load_qa_chain_with_retriever(retriever, llm_answer)
60
 
61
- climateqa_chain = SequentialChain(
62
  chains=[reformulation_chain, answer_chain],
63
  input_variables=["query", "audience"],
64
  output_variables=["answer", "question", "language", "source_documents"],
65
  return_all=True,
66
  verbose=True,
67
  )
68
- return climateqa_chain
69
 
70
 
71
  def load_reformulation_chain(llm):
@@ -112,6 +112,6 @@ def load_qa_chain_with_retriever(retriever, llm):
112
  retriever=retriever,
113
  return_source_documents=True,
114
  verbose=True,
115
- fallback_answer="**⚠️ No relevant passages found in the climate science reports (IPCC and IPBES), you may want to ask a more specific question (specifying your question on climate issues).**",
116
  )
117
  return answer_chain
 
7
  from langchain.chains import TransformChain, SequentialChain
8
  from langchain.chains.qa_with_sources import load_qa_with_sources_chain
9
 
10
+ from anyqa.prompts import answer_prompt, reformulation_prompt
11
+ from anyqa.custom_retrieval_chain import CustomRetrievalQAWithSourcesChain
12
 
13
 
14
  def load_qa_chain_with_docs(llm):
 
20
  ```
21
  output = chain({
22
  "question":query,
23
+ "audience":"experts scientists",
24
  "docs":docs,
25
  "language":"English",
26
  })
 
54
  return qa_chain
55
 
56
 
57
+ def load_qa_chain(retriever, llm_reformulation, llm_answer):
58
  reformulation_chain = load_reformulation_chain(llm_reformulation)
59
  answer_chain = load_qa_chain_with_retriever(retriever, llm_answer)
60
 
61
+ qa_chain = SequentialChain(
62
  chains=[reformulation_chain, answer_chain],
63
  input_variables=["query", "audience"],
64
  output_variables=["answer", "question", "language", "source_documents"],
65
  return_all=True,
66
  verbose=True,
67
  )
68
+ return qa_chain
69
 
70
 
71
  def load_reformulation_chain(llm):
 
112
  retriever=retriever,
113
  return_source_documents=True,
114
  verbose=True,
115
+ fallback_answer="**⚠️ No relevant passages found in the sources, you may want to ask a more specific question.**",
116
  )
117
  return answer_chain
{climateqa → anyqa}/custom_retrieval_chain.py RENAMED
File without changes
{climateqa → anyqa}/embeddings.py RENAMED
File without changes
{climateqa → anyqa}/llm.py RENAMED
File without changes
{climateqa → anyqa}/prompts.py RENAMED
File without changes
{climateqa → anyqa}/qa_logging.py RENAMED
File without changes
{climateqa → anyqa}/retriever.py RENAMED
File without changes
{climateqa → anyqa}/vectorstore.py RENAMED
File without changes
app.py CHANGED
@@ -7,14 +7,14 @@ from langchain.embeddings import HuggingFaceEmbeddings
7
  from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
8
 
9
  # ClimateQ&A imports
10
- from climateqa.embeddings import EMBEDDING_MODEL_NAME
11
- from climateqa.llm import get_llm
12
- from climateqa.qa_logging import log
13
- from climateqa.chains import load_qa_chain_with_text
14
- from climateqa.chains import load_reformulation_chain
15
- from climateqa.vectorstore import get_vectorstore
16
- from climateqa.retriever import QARetriever
17
- from climateqa.prompts import audience_prompts
18
 
19
  # Load environment variables in local mode
20
  try:
 
7
  from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
8
 
9
  # ClimateQ&A imports
10
+ from anyqa.embeddings import EMBEDDING_MODEL_NAME
11
+ from anyqa.llm import get_llm
12
+ from anyqa.qa_logging import log
13
+ from anyqa.chains import load_qa_chain_with_text
14
+ from anyqa.chains import load_reformulation_chain
15
+ from anyqa.vectorstore import get_vectorstore
16
+ from anyqa.retriever import QARetriever
17
+ from anyqa.prompts import audience_prompts
18
 
19
  # Load environment variables in local mode
20
  try: