ffreemt committed
Commit
ab527e7
1 Parent(s): b8dad98

Update prompt using langchain.PromptTemplate

Files changed (2)
  1. .gitignore +1 -0
  2. app.py +22 -5
.gitignore CHANGED
@@ -8,3 +8,4 @@ __pycache__
 .chroma
 .pnp.cjs
 models
+db1
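
The new db1 entry looks like the persist directory for a Chroma index; the commented-out persist_directory='db' line added to app.py below points the same way. A minimal sketch of how such a directory would be produced and later reloaded, with a placeholder document list and an assumed embedding model (not the exact code in app.py):

# Sketch only: how a persisted Chroma index ends up in a directory like db1.
from langchain.docstore.document import Document
from langchain.embeddings import SentenceTransformerEmbeddings
from langchain.vectorstores import Chroma

doc_chunks = [Document(page_content="example chunk of an uploaded book")]  # placeholder corpus
embedding = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")  # assumed model

# Build the index and write it to disk ...
db = Chroma.from_documents(doc_chunks, embedding, persist_directory="db1")
db.persist()

# ... then reload it later without re-embedding anything.
db = Chroma(persist_directory="db1", embedding_function=embedding)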
app.py CHANGED
@@ -434,6 +434,10 @@ def process_files(
 def embed_files(progress=gr.Progress()):
     """Embded ns.files_uploaded."""
     # initialize if necessary
+
+    # ns.db = Chroma.from_documents(doc_chunks, embedding, persist_directory='db')
+    # ns.db = Chroma.from_documents(doc_chunks, embedding)
+
     if ns.db is None:
         logger.info(f"loading {ns.model_name:}")
         embedding = SentenceTransformerEmbeddings(
@@ -479,7 +483,11 @@ def embed_files(progress=gr.Progress()):
         max_retries=3,
         client=None,
     )
-    retriever = ns.db.as_retriever()
+
+    # retriever = db.as_retriever(search_type="similarity", search_kwargs={"k": 2})
+    retriever = ns.db.as_retriever(
+        # search_kwargs={"k": 6}  # defaukt k=4
+    )
 
     prompt_template = """You're an AI version of the book and are supposed to answer quesions people
 have for the book. Thanks to advancements in AI people can
@@ -504,14 +512,23 @@ def embed_files(progress=gr.Progress()):
     )
 
     ns.qa = RetrievalQA.from_chain_type(
-        prompt=prompt,
-        input_variables=["context", "context"],
         llm=llm,
         chain_type="stuff",
         retriever=retriever,
-        # k=4,  # default 4
-        # return_source_documents=True,  # default False
+        chain_type_kwargs={"prompt": prompt},
+        return_source_documents=True,  # default False
+    )
+
+    _ = """ VectorDBQA deprecated
+    chain = RetrievalQA.from_chain_type(
+        chain_type_kwargs={"prompt": prompt},
+        llm=llm,
+        chain_type="stuff",
+        retriever=retriever,
+        # vectorstore=ns.db,
+        return_source_documents=True,
     )
+    # """
 
     logger.debug(f"{ns.ingest_done=}, exit process_files")
 
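
Taken together, the app.py changes follow the usual langchain pattern: build a PromptTemplate over the context and question variables, turn the Chroma store into a retriever, and hand the prompt to RetrievalQA through chain_type_kwargs rather than passing prompt= and input_variables= directly, which RetrievalQA.from_chain_type does not accept. A minimal, self-contained sketch of that pattern with placeholder model names, template text, and documents (assumptions, not the values used in app.py):

# Sketch of the pattern introduced in this commit, with placeholder values.
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.docstore.document import Document
from langchain.embeddings import SentenceTransformerEmbeddings
from langchain.prompts import PromptTemplate
from langchain.vectorstores import Chroma

# Placeholder corpus; app.py builds doc_chunks from the uploaded files instead.
doc_chunks = [Document(page_content="example chunk of the book")]
embedding = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
db = Chroma.from_documents(doc_chunks, embedding)

# as_retriever() defaults to k=4; pass search_kwargs={"k": 6} to fetch more chunks.
retriever = db.as_retriever()

prompt_template = """Use the following context to answer the question.

{context}

Question: {question}
Answer:"""
prompt = PromptTemplate(
    template=prompt_template,
    input_variables=["context", "question"],
)

llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0, max_retries=3)

# The prompt is forwarded to the underlying "stuff" chain via chain_type_kwargs.
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=retriever,
    chain_type_kwargs={"prompt": prompt},
    return_source_documents=True,
)

result = qa({"query": "What is the book about?"})
print(result["result"])

With return_source_documents=True the result dict also carries the retrieved chunks under result["source_documents"], which is presumably why the commit switches that flag on.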