import os

from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain_pinecone import PineconeVectorStore
prompt_template = """Answer the question using the given context to the best of your ability.
If you don't know the answer, reply "I don't know."

Context: {context}

Topic: {topic}

Use the following format for your answer:

[FORMAT]
Answer:
The answer to the user's question.
Reference:
A list of references to the specific sections of the documents that support your answer.
[END_FORMAT]
"""

PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "topic"])
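
# Illustrative only: a hypothetical render of the template with both slots
# filled (the context and topic strings here are made-up placeholders):
#
#     rendered = PROMPT.format(
#         context="Pinecone is a managed vector database for similarity search.",
#         topic="What is Pinecone?",
#     )
#     print(rendered)  # the template above with {context} and {topic} substituted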

class LangOpen:
    def __init__(self, model_name: str) -> None:
        self.index = self.initialize_index("openai-embeddings")
        self.llm = ChatOpenAI(temperature=0.3, model=model_name)
        self.chain = LLMChain(llm=self.llm, prompt=PROMPT)

    def initialize_index(self, index_name):
        # Embed queries with OpenAI and connect to the existing Pinecone index
        # (the Pinecone client reads the PINECONE_API_KEY environment variable).
        embeddings = OpenAIEmbeddings(model="text-embedding-3-large")
        return PineconeVectorStore(index_name=index_name, embedding=embeddings)
    def get_response(self, query_str):
        print("query_str: ", query_str)
        print("model_name: ", self.llm.model_name)
        # Retrieve the four most relevant chunks and merge them into a single
        # context string, so the model answers from all the evidence in one call.
        docs = self.index.similarity_search(query_str, k=4)
        context = "\n\n".join(doc.page_content for doc in docs)
        return self.chain.run(context=context, topic=query_str)
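
# Minimal usage sketch. Assumptions not in the original: a populated Pinecone
# index named "openai-embeddings" built with text-embedding-3-large vectors,
# the OPENAI_API_KEY and PINECONE_API_KEY environment variables set, and an
# illustrative model name and query.
if __name__ == "__main__":
    assert os.environ.get("OPENAI_API_KEY"), "OPENAI_API_KEY is not set"
    assert os.environ.get("PINECONE_API_KEY"), "PINECONE_API_KEY is not set"

    app = LangOpen(model_name="gpt-4o-mini")
    print(app.get_response("What is Pinecone?"))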