omkar334 committed on
Commit
37f14ac
1 Parent(s): c83181d

global client

Browse files
Files changed (2) hide show
  1. agent.py +3 -7
  2. app.py +6 -5
agent.py CHANGED
@@ -1,7 +1,6 @@
1
  from dotenv import load_dotenv
2
  from strictjson import strict_json_async
3
 
4
- from client import HybridClient
5
  from prompts import AGENT_PROMPT, RAG_SYS_PROMPT, RAG_USER_PROMPT
6
  from sarvam import speaker, translator
7
 
@@ -53,24 +52,21 @@ async def call_agent(user_prompt, grade, subject):
53
  return result
54
 
55
 
56
- async def function_caller(user_prompt, grade, subject, chapter):
 
 
57
  result = await call_agent(user_prompt, grade, subject)
58
- print(result)
59
  function = result["function"].lower()
60
 
61
  if function == "none":
62
  return result["response"]
63
 
64
  elif function == "retriever":
65
- client = HybridClient()
66
- collection = f"{grade}_{subject.lower()}_{chapter}"
67
-
68
  data = client.search(collection, user_prompt)
69
  data = [i.document for i in data]
70
 
71
  system_prompt = RAG_SYS_PROMPT.format(subject, grade)
72
  user_prompt = RAG_USER_PROMPT.format(data, user_prompt)
73
- print(user_prompt)
74
 
75
  response = await llm(system_prompt, user_prompt)
76
 
 
1
  from dotenv import load_dotenv
2
  from strictjson import strict_json_async
3
 
 
4
  from prompts import AGENT_PROMPT, RAG_SYS_PROMPT, RAG_USER_PROMPT
5
  from sarvam import speaker, translator
6
 
 
52
  return result
53
 
54
 
55
+ async def function_caller(user_prompt, collection, client):
56
+ grade, subject, chapter = collection.split("_")
57
+
58
  result = await call_agent(user_prompt, grade, subject)
 
59
  function = result["function"].lower()
60
 
61
  if function == "none":
62
  return result["response"]
63
 
64
  elif function == "retriever":
 
 
 
65
  data = client.search(collection, user_prompt)
66
  data = [i.document for i in data]
67
 
68
  system_prompt = RAG_SYS_PROMPT.format(subject, grade)
69
  user_prompt = RAG_USER_PROMPT.format(data, user_prompt)
 
70
 
71
  response = await llm(system_prompt, user_prompt)
72
 
app.py CHANGED
@@ -7,8 +7,10 @@ from fastapi.middleware.cors import CORSMiddleware
7
  from pydantic import BaseModel
8
 
9
  from agent import function_caller
 
10
 
11
  app = FastAPI()
 
12
 
13
  app.add_middleware(
14
  CORSMiddleware,
@@ -21,14 +23,12 @@ app.add_middleware(
21
 
22
  class ChatQuery(BaseModel):
23
  query: str
24
- grade: str
25
- subject: str
26
- chapter: str
27
 
28
 
29
  @app.post("/chat")
30
  async def chat(query: ChatQuery):
31
- result = await function_caller(query.query, query.grade, query.subject, query.chapter)
32
 
33
  if isinstance(result, str):
34
  return {"text": result}
@@ -43,7 +43,8 @@ async def chat(query: ChatQuery):
43
 
44
 
45
  async def gradio_interface(input_text, grade, subject, chapter):
46
- response = await chat(ChatQuery(query=input_text, grade=grade, subject=subject, chapter=chapter))
 
47
  if "text" in response:
48
  return response["text"], None
49
  elif "audio" in response:
 
7
  from pydantic import BaseModel
8
 
9
  from agent import function_caller
10
+ from client import HybridClient
11
 
12
  app = FastAPI()
13
+ hclient = HybridClient()
14
 
15
  app.add_middleware(
16
  CORSMiddleware,
 
23
 
24
  class ChatQuery(BaseModel):
25
  query: str
26
+ collection: str
 
 
27
 
28
 
29
  @app.post("/chat")
30
  async def chat(query: ChatQuery):
31
+ result = await function_caller(query.query, query.collection, hclient)
32
 
33
  if isinstance(result, str):
34
  return {"text": result}
 
43
 
44
 
45
  async def gradio_interface(input_text, grade, subject, chapter):
46
+ collection = f"{grade}_{subject.lower()}_{chapter}"
47
+ response = await chat(ChatQuery(query=input_text, collection=collection))
48
  if "text" in response:
49
  return response["text"], None
50
  elif "audio" in response: