PebinAPJ committed on
Commit
3e52811
·
verified ·
1 Parent(s): a21acde

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -12
app.py CHANGED
@@ -2,11 +2,11 @@ import streamlit as st
2
  from dotenv import load_dotenv
3
  from PyPDF2 import PdfReader
4
  from langchain.text_splitter import CharacterTextSplitter
5
- from langchain_huggingface import HuggingFaceEmbeddings
6
  from langchain_community.vectorstores import FAISS
7
- from langchain_huggingface import HuggingFaceHub
8
  from langchain.memory import ConversationBufferMemory
9
  from langchain.chains import ConversationalRetrievalChain
 
10
  import os
11
 
12
 
@@ -44,21 +44,18 @@ def get_vectorstore(text_chunks):
44
 
45
  # Function to create the conversation chain using T5 from Hugging Face API
46
  def get_conversation_chain(vectorstore):
47
- llm = HuggingFaceHub(
48
- repo_id="google/t5-large", # Replace with your Hugging Face model ID
49
- temperature=0.7, # Adjust for creativity
50
- max_new_tokens=100, # Control response length
51
- top_p=0.9, # Nucleus sampling for better variety
52
- top_k=50, # Top-k filtering
53
- repetition_penalty=1.0 # Default value to prevent repetitive outputs
54
- )
55
  memory = ConversationBufferMemory(
56
  memory_key="chat_history", return_messages=True
57
  )
58
 
59
- # Create a conversation chain using the correct LLM (llm, not t5_model)
60
  conversation_chain = ConversationalRetrievalChain.from_llm(
61
- llm=llm, # Pass 'llm' instead of 't5_model'
62
  retriever=vectorstore.as_retriever(),
63
  memory=memory,
64
  )
 
2
  from dotenv import load_dotenv
3
  from PyPDF2 import PdfReader
4
  from langchain.text_splitter import CharacterTextSplitter
5
+ from langchain_huggingface import HuggingFaceEmbeddings, HuggingFacePipeline
6
  from langchain_community.vectorstores import FAISS
 
7
  from langchain.memory import ConversationBufferMemory
8
  from langchain.chains import ConversationalRetrievalChain
9
+ from transformers import pipeline # Hugging Face pipeline for using T5 model
10
  import os
11
 
12
 
 
44
 
45
  # Function to create the conversation chain using T5 from Hugging Face API
46
  def get_conversation_chain(vectorstore):
47
+ # Use HuggingFacePipeline with a Hugging Face T5 model
48
+ t5_model = pipeline("text2text-generation", model="google/t5-large", tokenizer="google/t5-large")
49
+
50
+ llm = HuggingFacePipeline(pipeline=t5_model)
51
+
 
 
 
52
  memory = ConversationBufferMemory(
53
  memory_key="chat_history", return_messages=True
54
  )
55
 
56
+ # Create a conversation chain using the T5 model
57
  conversation_chain = ConversationalRetrievalChain.from_llm(
58
+ llm=llm,
59
  retriever=vectorstore.as_retriever(),
60
  memory=memory,
61
  )