LOUIS SANNA committed
Commit 93decd4
1 Parent(s): cde6d5c

feat(*): rename
- README.md +2 -0
- {climateqa → anyqa}/__init__.py +0 -0
- {climateqa → anyqa}/build_index.py +0 -0
- {climateqa → anyqa}/chains.py +7 -7
- {climateqa → anyqa}/custom_retrieval_chain.py +0 -0
- {climateqa → anyqa}/embeddings.py +0 -0
- {climateqa → anyqa}/llm.py +0 -0
- {climateqa → anyqa}/prompts.py +0 -0
- {climateqa → anyqa}/qa_logging.py +0 -0
- {climateqa → anyqa}/retriever.py +0 -0
- {climateqa → anyqa}/vectorstore.py +0 -0
- app.py +8 -8
README.md
CHANGED
@@ -13,6 +13,8 @@ pinned: false
 
 A clone of the amazing https://huggingface.co/spaces/Ekimetrics/climate-question-answering.
 
+We abstracted the code so it's easy to build another tool based on another domain.
+
 ## Build vector index
 
 ```bash
{climateqa → anyqa}/__init__.py
RENAMED
File without changes
{climateqa → anyqa}/build_index.py
RENAMED
File without changes
{climateqa → anyqa}/chains.py
RENAMED
@@ -7,8 +7,8 @@ from langchain.chains import QAWithSourcesChain
 from langchain.chains import TransformChain, SequentialChain
 from langchain.chains.qa_with_sources import load_qa_with_sources_chain
 
-from
-from
+from anyqa.prompts import answer_prompt, reformulation_prompt
+from anyqa.custom_retrieval_chain import CustomRetrievalQAWithSourcesChain
 
 
 def load_qa_chain_with_docs(llm):
@@ -20,7 +20,7 @@ def load_qa_chain_with_docs(llm):
     ```
     output = chain({
         "question":query,
-        "audience":"experts
+        "audience":"experts scientists",
         "docs":docs,
         "language":"English",
     })
@@ -54,18 +54,18 @@ def load_qa_chain_with_text(llm):
     return qa_chain
 
 
-def
+def load_qa_chain(retriever, llm_reformulation, llm_answer):
     reformulation_chain = load_reformulation_chain(llm_reformulation)
     answer_chain = load_qa_chain_with_retriever(retriever, llm_answer)
 
-
+    qa_chain = SequentialChain(
         chains=[reformulation_chain, answer_chain],
         input_variables=["query", "audience"],
         output_variables=["answer", "question", "language", "source_documents"],
         return_all=True,
         verbose=True,
     )
-    return
+    return qa_chain
 
 
 def load_reformulation_chain(llm):
@@ -112,6 +112,6 @@ def load_qa_chain_with_retriever(retriever, llm):
         retriever=retriever,
         return_source_documents=True,
         verbose=True,
-        fallback_answer="**⚠️ No relevant passages found in the
+        fallback_answer="**⚠️ No relevant passages found in the sources, you may want to ask a more specific question.**",
     )
     return answer_chain
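For orientation, the new `load_qa_chain` factory chains a reformulation step and an answer step via `SequentialChain`. Below is a minimal usage sketch, not code from the repository: `get_llm`, `get_vectorstore`, and `QARetriever` are imported in app.py further down, but their exact signatures are assumptions here, and the example question is invented.

```python
# Hedged sketch of calling the renamed anyqa chain factory.
# load_qa_chain(retriever, llm_reformulation, llm_answer) and the output keys
# come from the diff above; the constructor calls marked "assumed" do not.
from anyqa.chains import load_qa_chain
from anyqa.llm import get_llm
from anyqa.retriever import QARetriever
from anyqa.vectorstore import get_vectorstore

llm = get_llm()                                          # assumed: callable with no args
retriever = QARetriever(vectorstore=get_vectorstore())   # assumed constructor

qa_chain = load_qa_chain(retriever, llm_reformulation=llm, llm_answer=llm)

# SequentialChain declares input_variables=["query", "audience"] and
# output_variables=["answer", "question", "language", "source_documents"].
output = qa_chain({
    "query": "What does the source material say about X?",
    "audience": "experts scientists",
})
print(output["answer"])
for doc in output["source_documents"]:
    print(doc.metadata)
```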
{climateqa → anyqa}/custom_retrieval_chain.py
RENAMED
File without changes
{climateqa → anyqa}/embeddings.py
RENAMED
File without changes
{climateqa → anyqa}/llm.py
RENAMED
File without changes
{climateqa → anyqa}/prompts.py
RENAMED
File without changes
{climateqa → anyqa}/qa_logging.py
RENAMED
File without changes
{climateqa → anyqa}/retriever.py
RENAMED
File without changes
{climateqa → anyqa}/vectorstore.py
RENAMED
File without changes
app.py
CHANGED
@@ -7,14 +7,14 @@ from langchain.embeddings import HuggingFaceEmbeddings
 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
 
 # ClimateQ&A imports
-from
-from
-from
-from
-from
-from
-from
-from
+from anyqa.embeddings import EMBEDDING_MODEL_NAME
+from anyqa.llm import get_llm
+from anyqa.qa_logging import log
+from anyqa.chains import load_qa_chain_with_text
+from anyqa.chains import load_reformulation_chain
+from anyqa.vectorstore import get_vectorstore
+from anyqa.retriever import QARetriever
+from anyqa.prompts import audience_prompts
 
 # Load environment variables in local mode
 try:
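Only the import block of app.py changes in this commit; the rest of the file is not shown. The sketch below is a speculative picture of how the renamed anyqa helpers could be wired together, with every call signature beyond the import names assumed rather than taken from the repository.

```python
# Assumed wiring of the anyqa modules; app.py's real logic is not part of this diff.
from langchain.embeddings import HuggingFaceEmbeddings

from anyqa.embeddings import EMBEDDING_MODEL_NAME
from anyqa.llm import get_llm
from anyqa.vectorstore import get_vectorstore
from anyqa.retriever import QARetriever
from anyqa.chains import load_reformulation_chain, load_qa_chain_with_text

# HuggingFaceEmbeddings appears in the hunk header above; passing
# EMBEDDING_MODEL_NAME to it is an assumption, as is everything below.
embeddings = HuggingFaceEmbeddings(model_name=EMBEDDING_MODEL_NAME)
vectorstore = get_vectorstore(embeddings)             # assumed signature
retriever = QARetriever(vectorstore=vectorstore)      # assumed constructor
llm = get_llm()                                       # assumed: no required args

reformulation_chain = load_reformulation_chain(llm)   # signature shown in chains.py
qa_chain = load_qa_chain_with_text(llm)               # signature shown in chains.py
```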