hbui committed
Commit fd5ff1b
1 Parent(s): 0cce375

Update models/langOpen.py

Files changed (1)
models/langOpen.py +9 -18
models/langOpen.py CHANGED
@@ -1,18 +1,13 @@
 import os

 import openai
-from dotenv import load_dotenv
+
 from langchain.chains import LLMChain
 from langchain.chat_models import ChatOpenAI
-from langchain.document_loaders import PyPDFLoader
-from langchain.embeddings.openai import OpenAIEmbeddings
-from langchain.prompts import PromptTemplate
-from langchain.vectorstores import FAISS
-
-loader = PyPDFLoader("./assets/pdf/CADWReg.pdf")
-pages = loader.load_and_split()

-load_dotenv()
+from langchain.embeddings import OpenAIEmbeddings
+from langchain.prompts import PromptTemplate
+from langchain_pinecone import PineconeVectorStore

 prompt_template = """Answer the question using the given context to the best of your ability.
 If you don't know, answer I don't know.
@@ -29,15 +24,11 @@ class LangOpen:
         self.chain = LLMChain(llm=self.llm, prompt=PROMPT)

     def initialize_index(self, index_name):
-        path = f"./vectorStores/{index_name}"
-        embeddings = OpenAIEmbeddings()
-
-        if os.path.exists(path=path):
-            return FAISS.load_local(folder_path=path, embeddings=embeddings)
-        else:
-            faiss = FAISS.from_documents(pages, embeddings)
-            faiss.save_local(path)
-            return faiss
+        embeddings = OpenAIEmbeddings(model="text-embedding-3-large")
+        index_name = "openai-embeddings"
+        vectorstore = PineconeVectorStore(index_name=index_name, embedding=embeddings)
+        return vectorstore
+

     def get_response(self, query_str):
         print("query_str: ", query_str)