Update README.md
README.md
CHANGED
@@ -1,3 +1,80 @@
---
license: apache-2.0
task_categories:
- question-answering
- summarization
- conversational
- sentence-similarity
language:
- en
pretty_name: FAISS Vector Store of Embeddings of the Chartered Financial Analysts Level 1 Curriculum
tags:
- faiss
- langchain
- instructor embeddings
- vector stores
- LLM
---
# Vector store of embeddings for the CFA Level 1 Curriculum

This is a FAISS vector store created with Sentence Transformer embeddings using LangChain. Use it for similarity search, question answering, or anything else that leverages embeddings! 😃

Creating these embeddings can take a while, so here's a convenient, downloadable one 🤗
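
If you'd rather build a store like this yourself, a minimal LangChain sketch is shown below. The embedding model and the embed/query instructions match the ones used in the loading code further down; the PDF loader, file name, and chunking parameters are illustrative assumptions, not the exact settings used to produce this dataset.

```python
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import HuggingFaceInstructEmbeddings
from langchain.vectorstores.faiss import FAISS

# load and chunk the source document (loader, file name, and chunk sizes are assumptions)
docs = PyPDFLoader("cfa_level_1_volume_1.pdf").load()
chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)

# instructor embeddings with the same instructions used at query time
instruct_emb = HuggingFaceInstructEmbeddings(
    model_name="hkunlp/instructor-large",
    embed_instruction="Represent the financial paragraph for document retrieval: ",
    query_instruction="Represent the question for retrieving supporting documents: ",
)

# embed the chunks, build the FAISS index, and save it to disk
db = FAISS.from_documents(chunks, instruct_emb)
db.save_local("cfa/cfa_level_1")
```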

## How to use

1. Download the data
2. Load it to use with LangChain

```bash
# InstructorEmbedding is required by HuggingFaceInstructEmbeddings below
pip install -qqq langchain sentence_transformers InstructorEmbedding faiss-cpu huggingface_hub
```

```python
import os

from langchain.embeddings import HuggingFaceEmbeddings, HuggingFaceInstructEmbeddings
from langchain.vectorstores.faiss import FAISS
from huggingface_hub import snapshot_download

# download the vector store for the curriculum
# (pass allow_patterns to snapshot_download if you only need part of the repo)
cache_dir = "cfa_level_1_cache"
vectorstore = snapshot_download(repo_id="nickmuchi/CFA_Level_1_Text_Embeddings",
                                repo_type="dataset",
                                revision="main",
                                cache_dir=cache_dir,
                                )

# get the path to the `vectorstore` folder that you just downloaded
# we'll look inside the `cache_dir` for the folder we want
target_dir = "cfa/cfa_level_1"

# walk the directory tree recursively and keep the folder whose path ends with `target_dir`
target_path = None
for root, dirs, files in os.walk(cache_dir):
    if root.replace(os.sep, "/").endswith(target_dir):
        target_path = root
        break

# these are the embedding models and instructions that were used to create
# the embeddings for the text
embed_instruction = "Represent the financial paragraph for document retrieval: "
query_instruction = "Represent the question for retrieving supporting documents: "

model_sbert = "sentence-transformers/all-mpnet-base-v2"
sbert_emb = HuggingFaceEmbeddings(model_name=model_sbert)

model_instr = "hkunlp/instructor-large"
instruct_emb = HuggingFaceInstructEmbeddings(model_name=model_instr,
                                             embed_instruction=embed_instruction,
                                             query_instruction=query_instruction)

# load the vector store to use with LangChain
# (the embeddings you pass must match the model the index was built with)
docsearch = FAISS.load_local(folder_path=target_path, embeddings=sbert_emb)

# similarity search
question = "How do you hedge the interest rate risk of an MBS?"
search = docsearch.similarity_search(question, k=4)

for item in search:
    print(item.page_content)
    print(f"From page: {item.metadata['page']}")
    print("---")
```
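
Once loaded, the same vector store can also back a retrieval-augmented question-answering chain. The sketch below uses LangChain's `RetrievalQA` with the FAISS store as a retriever; the LLM choice (`google/flan-t5-xxl` via `HuggingFaceHub`) and its parameters are illustrative assumptions, not part of this dataset.

```python
from langchain.chains import RetrievalQA
from langchain.llms import HuggingFaceHub

# any LangChain-compatible LLM works here; HuggingFaceHub is just one option
# (it needs the HUGGINGFACEHUB_API_TOKEN environment variable to be set)
llm = HuggingFaceHub(repo_id="google/flan-t5-xxl",
                     model_kwargs={"temperature": 0.1, "max_length": 512})

# expose the FAISS store as a retriever that returns the top-k most similar chunks
retriever = docsearch.as_retriever(search_kwargs={"k": 4})

qa_chain = RetrievalQA.from_chain_type(llm=llm,
                                       chain_type="stuff",
                                       retriever=retriever,
                                       return_source_documents=True)

result = qa_chain({"query": "How do you hedge the interest rate risk of an MBS?"})
print(result["result"])
for doc in result["source_documents"]:
    print(f"From page: {doc.metadata['page']}")
```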