RishuD7 committed on
Commit f49ef44 · 1 Parent(s): 37f8714
Files changed (2):
  1. app.py +5 -2
  2. privateGPT.py +0 -2
app.py CHANGED
@@ -24,8 +24,6 @@ source_directory = os.environ.get('SOURCE_DIRECTORY', 'source_documents')
 from constants import CHROMA_SETTINGS
 
 
-embeddings = HuggingFaceEmbeddings(model_name=embeddings_model_name)
-llm = HuggingFacePipeline.from_model_id(model_id=model, task="text-generation", device=0, model_kwargs={"temperature":0.1,"trust_remote_code": True, "max_length":100000, "top_p":0.15, "top_k":0, "repetition_penalty":1.1, "num_return_sequences":1,})
 
 # async def test_embedding():
 #     # Create the folder if it doesn't exist
@@ -102,10 +100,15 @@ def embed_documents(files, collection_name: Optional[str] = None):
 def retrieve_documents(query: str, collection_name:str):
     target_source_chunks = 4
     mute_stream = ""
+    embeddings = HuggingFaceEmbeddings(model_name=embeddings_model_name)
+
     db = Chroma(persist_directory=persist_directory,collection_name=collection_name, embedding_function=embeddings, client_settings=CHROMA_SETTINGS)
     retriever = db.as_retriever(search_kwargs={"k": target_source_chunks})
     # Prepare the LLM
     callbacks = [] if mute_stream else [StreamingStdOutCallbackHandler()]
+
+    llm = HuggingFacePipeline.from_model_id(model_id=model, task="text-generation", device=0, model_kwargs={"temperature":0.1,"trust_remote_code": True, "max_length":100000, "top_p":0.15, "top_k":0, "repetition_penalty":1.1, "num_return_sequences":1,})
+
     qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever, return_source_documents=False)
 
     # Get the answer from the chain
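Net effect for app.py: the embedding model and the Hugging Face text-generation pipeline are no longer created at import time but inside retrieve_documents(), so they are built on each retrieval call (lighter startup, but the model is reloaded per request). The sketch below only stitches the hunks above together; it assumes embeddings_model_name, model, persist_directory and CHROMA_SETTINGS are defined elsewhere in app.py, uses the classic langchain import paths already seen in this repo, and the body after "Get the answer from the chain" is unchanged by this commit and not shown.

# Sketch of retrieve_documents() after this commit; module-level names
# (embeddings_model_name, model, persist_directory, CHROMA_SETTINGS) are assumed.
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.llms import HuggingFacePipeline
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

def retrieve_documents(query: str, collection_name: str):
    target_source_chunks = 4
    mute_stream = ""

    # Embeddings are now built per call instead of once at module import.
    embeddings = HuggingFaceEmbeddings(model_name=embeddings_model_name)

    db = Chroma(persist_directory=persist_directory, collection_name=collection_name,
                embedding_function=embeddings, client_settings=CHROMA_SETTINGS)
    retriever = db.as_retriever(search_kwargs={"k": target_source_chunks})

    # Prepare the LLM (also loaded lazily, per call).
    callbacks = [] if mute_stream else [StreamingStdOutCallbackHandler()]
    llm = HuggingFacePipeline.from_model_id(
        model_id=model,
        task="text-generation",
        device=0,
        model_kwargs={"temperature": 0.1, "trust_remote_code": True, "max_length": 100000,
                      "top_p": 0.15, "top_k": 0, "repetition_penalty": 1.1,
                      "num_return_sequences": 1},
    )

    qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever,
                                     return_source_documents=False)
    # Get the answer from the chain (remainder of the function unchanged by this commit)
    ...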
privateGPT.py CHANGED
@@ -5,8 +5,6 @@ from langchain.vectorstores import Chroma
 from langchain.llms import GPT4All, LlamaCpp
 import os
 
-load_dotenv()
-
 # embeddings_model_name = os.environ.get("EMBEDDINGS_MODEL_NAME")
 # persist_directory = os.environ.get('PERSIST_DIRECTORY')
 
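For privateGPT.py, dropping load_dotenv() means the script no longer reads a local .env file; the commented-out os.environ.get(...) lines above show which variables were involved, so any configuration still taken from the environment has to be exported by the caller (e.g. the Space's environment settings or the shell). A minimal sketch under that assumption, with illustrative default values that are not taken from this repo:

import os

# Without python-dotenv, settings come straight from the process environment.
embeddings_model_name = os.environ.get("EMBEDDINGS_MODEL_NAME", "all-MiniLM-L6-v2")  # default is an assumption
persist_directory = os.environ.get("PERSIST_DIRECTORY", "db")  # default is an assumption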