Baweja committed on
Commit
2a68f98
·
verified ·
1 Parent(s): 88d25a0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -17
app.py CHANGED
@@ -120,15 +120,15 @@ def respond(
120
  message,
121
  history: list[tuple[str, str]],
122
  system_message,
123
- max_tokens,
124
- temperature,
125
- top_p,
126
  ):
127
  # Load model
128
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
129
 
130
- dataset_path = "/IndexedDataFiles/my_knowledge_dataset"
131
- index_path = "/IndexedDataFiles/my_knowledge_dataset_hnsw_index.faiss"
132
 
133
  tokenizer = AutoTokenizer.from_pretrained("facebook/rag-sequence-nq")
134
  retriever = RagRetriever.from_pretrained("facebook/rag-sequence-nq", index_name="custom",
@@ -138,21 +138,15 @@ def respond(
138
  rag_model = RagSequenceForGeneration.from_pretrained('facebook/rag-sequence-nq', retriever=retriever)
139
  rag_model.retriever.init_retrieval()
140
  rag_model.to(device)
141
- messages = [{"role": "system", "content": system_message}]
142
 
143
- for val in history:
144
- if val[0]:
145
- messages.append({"role": "user", "content": val[0]})
146
- if val[1]:
147
- messages.append({"role": "assistant", "content": val[1]})
148
 
149
- messages.append({"role": "user", "content": message})
150
-
151
- #response = ""
152
 
153
- response = retrieved_info(rag_model, message)
154
-
155
- yield response
156
 
157
  """
158
  For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
 
120
  message,
121
  history: list[tuple[str, str]],
122
  system_message,
123
+ max_tokens = None,
124
+ temperature = None,
125
+ top_p = None,
126
  ):
127
  # Load model
128
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
129
 
130
+ dataset_path = "./IndexedDataFiles/my_knowledge_dataset"
131
+ index_path = "./IndexedDataFiles/my_knowledge_dataset_hnsw_index.faiss"
132
 
133
  tokenizer = AutoTokenizer.from_pretrained("facebook/rag-sequence-nq")
134
  retriever = RagRetriever.from_pretrained("facebook/rag-sequence-nq", index_name="custom",
 
138
  rag_model = RagSequenceForGeneration.from_pretrained('facebook/rag-sequence-nq', retriever=retriever)
139
  rag_model.retriever.init_retrieval()
140
  rag_model.to(device)
 
141
 
142
+ if message: # If there's a user query
143
+ response = retrieved_info(rag_model, message) # Get the answer from your local FAISS and Q&A model
144
+ return response
 
 
145
 
146
+ # In case no message, return an empty string
147
+ return ""
148
+
149
 
 
 
 
150
 
151
  """
152
  For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface