prithvirajpawar committed
Commit 140b902 · 1 Parent(s): 68acb0b

addition of intro_msg

Files changed (2)
  1. app.py +10 -8
  2. helpmate_ai.py +5 -4
app.py CHANGED
@@ -2,7 +2,7 @@ from fastapi import FastAPI, Request, Depends, HTTPException, Header, File, Uplo
 from fastapi.middleware.cors import CORSMiddleware
 from pydantic import BaseModel
 from typing import List, Optional
-from helpmate_ai import get_system_msg, retreive_results, rerank_with_cross_encoder, generate_response
+from helpmate_ai import get_system_msg, retreive_results, rerank_with_cross_encoder, generate_response, intro_message
 import google.generativeai as genai
 import os
 from dotenv import load_dotenv
@@ -11,6 +11,7 @@ import re
 import speech_recognition as sr
 from io import BytesIO
 import wave
+
 import google.generativeai as genai
 
 
@@ -77,11 +78,11 @@ def get_gemini_completions(conversation: str) -> str:
 async def initialize_chat():
     global conversation_bot
 
-    conversation = "Hi"
-    introduction = get_gemini_completions(conversation)
-    conversation_bot = [Message(role="bot", content=introduction)]
+    # conversation = "Hi"
+    # introduction = get_gemini_completions(conversation)
+    conversation_bot = [Message(role="bot", content=intro_message)]
     return ChatResponse(
-        response=introduction,
+        response=intro_message,
         conversation=conversation_bot
     )
 
@@ -109,6 +110,7 @@ async def chat(request: ChatRequest):
         response=response_assistant,
         conversation=conversation_bot
     )
+
 # Voice processing endpoint
 @app.post("/process-voice")
 async def process_voice(audio_file: UploadFile = File(...), dependencies=[Depends(verify_api_key)]):
@@ -161,9 +163,9 @@ async def handle_feedback(
 async def reset_conversation():
     global conversation_bot, conversation
     conversation_bot = []
-    conversation = "Hi"
-    introduction = get_gemini_completions(conversation)
-    conversation_bot.append(Message(role="bot", content=introduction))
+    # conversation = "Hi"
+    # introduction = get_gemini_completions(conversation)
+    conversation_bot.append(Message(role="bot", content=intro_message))
     return {"status": "success", "message": "Conversation reset"}
 
 if __name__ == "__main__":
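
Note: a minimal sketch of the resulting startup path in app.py, assuming Pydantic models and a route decorator roughly as below (the decorators, models, and API-key dependency sit outside the hunks shown above). The greeting is now the static intro_message constant, so initializing or resetting a chat no longer needs a get_gemini_completions round trip.

# Minimal sketch, not the full app.py: the route path and model definitions are assumed here.
from typing import List

from fastapi import FastAPI
from pydantic import BaseModel

from helpmate_ai import intro_message  # static greeting added in this commit

app = FastAPI()

class Message(BaseModel):
    role: str
    content: str

class ChatResponse(BaseModel):
    response: str
    conversation: List[Message]

conversation_bot: List[Message] = []

@app.get("/initialize")  # assumed route name; the decorator is outside the diff shown above
async def initialize_chat() -> ChatResponse:
    global conversation_bot
    # The greeting is a constant, so no Gemini call is needed to start a conversation.
    conversation_bot = [Message(role="bot", content=intro_message)]
    return ChatResponse(response=intro_message, conversation=conversation_bot)

Besides saving a model call on a fixed greeting, this makes the first response deterministic across restarts.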
helpmate_ai.py CHANGED
@@ -2,6 +2,8 @@
 import pandas as pd
 import chromadb
 
+intro_message = 'Hello! 😊 Ask me questions about the Insurance Policy. Sample questions you can ask are \'What is the name of the policy?\', \'What are the different types of insurance offered?\', \'What are the premium rates for different types of insurance under this policy?\' etc.'
+
 def get_system_msg():
     """
     Generate a response using GPT-3.5's ChatCompletion based on the user query and retrieved information.
@@ -40,13 +42,12 @@ def get_system_msg():
 
     Guidelines:
     1. Extract information that directly answers the user's query from the document excerpts.
-    3. Provide the final response as a well-formatted HTML test and easily readable text along with the citation.
+    3. Provide the final response as well-formatted and easily readable text along with the citation.
     4. Provide your complete response using the relevant parts in the documents.
     5. The generated response should answer the query directly addressing the user and avoiding additional information about how you work.
    6. If the provided excerpts do not fully answer the query, provide partial information and suggest which sections of the policy document the user should review for further details.
     7. If no relevant information is found in the provided excerpts, respond with 'No relevant information found in the provided excerpts.'
-
-    <When user says 'Hi' respond with a short welcome message which also has policy name and a smiley.>
+
     """
     ]
 
@@ -118,7 +119,7 @@ def retreive_results(query):
     # Query the collection against the user query and return the top 10 results
     results = insurance_collection.query(
         query_texts=query,
-        n_results=10
+        n_results=50
     )
 
     # Store the query in cache_collection as document w.r.t to ChromaDB so that it can be embedded and searched against later
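
Note: raising n_results from 10 to 50 widens the candidate pool, which pairs naturally with the cross-encoder reranking step imported in app.py (rerank_with_cross_encoder). A rough sketch of that retrieve-then-rerank pattern, with the collection name and cross-encoder model assumed rather than taken from this repo:

# Rough sketch of retrieve-then-rerank; the collection and model names below are illustrative.
import chromadb
from sentence_transformers import CrossEncoder

client = chromadb.Client()
insurance_collection = client.get_or_create_collection("insurance_policy")  # assumed name
cross_encoder = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2")         # assumed model

def retrieve_and_rerank(query: str, top_k: int = 10):
    # Pull a wide candidate set (50, matching this commit) so the reranker has room to work.
    results = insurance_collection.query(query_texts=[query], n_results=50)
    docs = results["documents"][0]
    # Score each (query, document) pair with the cross-encoder and keep the best top_k.
    scores = cross_encoder.predict([(query, doc) for doc in docs])
    ranked = sorted(zip(docs, scores), key=lambda pair: pair[1], reverse=True)
    return ranked[:top_k]

With this split, the vector search only has to be recall-oriented; precision comes from the reranker, so returning 50 rough candidates is cheap while the final answer is built from the few best-scoring excerpts.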