import os

import openai
from dotenv import load_dotenv
from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain.vectorstores import FAISS
# Load environment variables (e.g. OPENAI_API_KEY) before any OpenAI-backed objects are created.
load_dotenv()

# Load the source PDF and split it into page-level documents.
loader = PyPDFLoader("./assets/pdf/CADWReg.pdf")
pages = loader.load_and_split()
prompt_template = """Answer the question using the given context to the best of your ability.
If you don't know, answer I don't know.
Context: {context}
Topic: {topic}"""
PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "topic"])
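# For illustration only: PROMPT.format fills both variables into the template, e.g.
# (the context and topic strings below are made-up placeholders, not taken from the PDF)
#     PROMPT.format(context="Section 1. Wells must be registered ...",
#                   topic="Do wells need to be registered?")
# The resulting string is what gets sent to the chat model for each retrieved chunk.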
class LangOpen:
    def __init__(self, model_name: str) -> None:
        # Build (or load) the FAISS index first, then set up the LLM and the QA chain.
        self.index = self.initialize_index("langOpen")
        self.llm = ChatOpenAI(temperature=0.3, model=model_name)
        self.chain = LLMChain(llm=self.llm, prompt=PROMPT)

    def initialize_index(self, index_name):
        """Load a persisted FAISS index if one exists; otherwise build it from the PDF pages and save it."""
        path = f"./vectorStores/{index_name}"
        embeddings = OpenAIEmbeddings()
        if os.path.exists(path):
            return FAISS.load_local(folder_path=path, embeddings=embeddings)
        else:
            faiss = FAISS.from_documents(pages, embeddings)
            faiss.save_local(path)
            return faiss
    def get_response(self, query_str):
        print("query_str: ", query_str)
        print("model_name: ", self.llm.model_name)
        # Retrieve the four most similar chunks, run the chain once per chunk,
        # and return only the answer generated from the top-ranked chunk.
        docs = self.index.similarity_search(query_str, k=4)
        inputs = [{"context": doc.page_content, "topic": query_str} for doc in docs]
        result = self.chain.apply(inputs)[0]["text"]
        return result
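

# Minimal usage sketch (assumptions: a valid OPENAI_API_KEY in .env and the model name
# "gpt-3.5-turbo"; the question below is a placeholder, not drawn from CADWReg.pdf):
if __name__ == "__main__":
    lang_open = LangOpen(model_name="gpt-3.5-turbo")
    answer = lang_open.get_response("What does this regulation cover?")
    print(answer)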