alaselababatunde committed on
Commit
2f34bac
·
1 Parent(s): 254e2bc
Files changed (1) hide show
  1. main.py +27 -3
main.py CHANGED
@@ -8,6 +8,7 @@ from pydantic import BaseModel
8
  import torch
9
  import logging
10
  import os
 
11
 
12
  from huggingface_hub import login
13
  from langchain.llms.huggingface_pipeline import HuggingFacePipeline
@@ -102,7 +103,7 @@ Conversation so far:
102
  {conversation_history}
103
 
104
  User: {query}
105
- Tech Disciples AI (respond with warmth, depth, and Biblical understanding):
106
  """
107
 
108
  prompt = PromptTemplate(
@@ -112,6 +113,25 @@ prompt = PromptTemplate(
112
 
113
  chain = LLMChain(prompt=prompt, llm=llm, memory=memory) if llm else None
114
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
115
  # =====================================================
116
  # REQUEST MODEL
117
  # =====================================================
@@ -136,7 +156,11 @@ async def ai_chat(data: QueryInput, x_api_key: str = Header(None)):
136
 
137
  try:
138
  response = chain.run(query=data.query.strip())
139
- return {"reply": response.strip()}
 
 
 
 
140
  except Exception as e:
141
  logger.error(f"⚠️ Model runtime error: {e}")
142
- raise HTTPException(status_code=500, detail=f"Model failed to respond — {e}")
 
8
  import torch
9
  import logging
10
  import os
11
+ import re
12
 
13
  from huggingface_hub import login
14
  from langchain.llms.huggingface_pipeline import HuggingFacePipeline
 
103
  {conversation_history}
104
 
105
  User: {query}
106
+ Tech Disciples AI:
107
  """
108
 
109
  prompt = PromptTemplate(
 
113
 
114
  chain = LLMChain(prompt=prompt, llm=llm, memory=memory) if llm else None
115
 
116
+ # =====================================================
117
+ # HELPER FUNCTION TO CLEAN RESPONSE
118
+ # =====================================================
119
def clean_response(text):
    """Strip model end-of-generation markers from a reply and tidy its whitespace.

    Removes bracketed end markers (case-insensitively) and tokenizer
    end-of-text tokens, collapses runs of blank lines, and strips
    surrounding whitespace before the reply is returned to the client.
    """
    # (pattern, flags) pairs for markers the model sometimes appends.
    markers = (
        (r'\[End of conversation\]', re.IGNORECASE),
        (r'\[END\]', re.IGNORECASE),
        (r'<\|endoftext\|>', 0),
        (r'</s>', 0),
    )
    for pattern, flags in markers:
        text = re.sub(pattern, '', text, flags=flags)

    # Collapse three or more consecutive newlines to a single blank line.
    text = re.sub(r'\n{3,}', '\n\n', text)

    return text.strip()
134
+
135
  # =====================================================
136
  # REQUEST MODEL
137
  # =====================================================
 
156
 
157
  try:
158
  response = chain.run(query=data.query.strip())
159
+
160
+ # Clean the response before sending
161
+ cleaned_response = clean_response(response)
162
+
163
+ return {"reply": cleaned_response}
164
  except Exception as e:
165
  logger.error(f"⚠️ Model runtime error: {e}")
166
+ raise HTTPException(status_code=500, detail=f"Model failed to respond — {e}")