mtyrrell committed
Commit: c8f5440 · Parent(s): 10a3be0

convo history truncation finalized

Files changed (3)
  1. app/main.py +26 -40
  2. app/models.py +0 -2
  3. app/utils.py +1 -5
app/main.py CHANGED

@@ -81,40 +81,30 @@ async def chatui_adapter(data):
     messages_value = getattr(data, 'messages', None)
     preprompt_value = getattr(data, 'preprompt', None)
 
-    # Extract query - prefer structured messages over legacy text field
-    if messages_value and len(messages_value) > 0:
-        logger.info("✓ Using structured messages")
-
-        # Convert dict messages to objects if needed
-        messages = []
-        for msg in messages_value:
-            if isinstance(msg, dict):
-                messages.append(type('Message', (), {
-                    'role': msg.get('role', 'unknown'),
-                    'content': msg.get('content', '')
-                })())
-            else:
-                messages.append(msg)
-
-        # Extract latest user query
-        user_messages = [msg for msg in messages if msg.role == 'user']
-        query = user_messages[-1].content if user_messages else text_value
-
-        # Log conversation context
-        logger.info(f"Processing query: {query}")
-        logger.info(f"Total messages in conversation: {len(messages)}")
-        logger.info(f"User messages: {len(user_messages)}, Assistant messages: {len([m for m in messages if m.role == 'assistant'])}")
-
-        # Build conversation context for generation (last N turns)
-        conversation_context = build_conversation_context(messages, max_turns=MAX_TURNS, max_chars=MAX_CHARS)
-        logger.info(f"Messages: {messages}")
-        logger.info(f"Conversation context: {conversation_context}")
-    else:
-        # Fallback to legacy text field
-        query = text_value
-        conversation_context = None
-        logger.info(f"✗ Using legacy text field (messages not found or empty)")
-        logger.info(f"Processing query: {query[:100]}...")
+    # Extract query
+
+    # Convert dict messages to objects if needed
+    messages = []
+    for msg in messages_value:
+        if isinstance(msg, dict):
+            messages.append(type('Message', (), {
+                'role': msg.get('role', 'unknown'),
+                'content': msg.get('content', '')
+            })())
+        else:
+            messages.append(msg)
+
+    # Extract latest user query
+    user_messages = [msg for msg in messages if msg.role == 'user']
+    query = user_messages[-1].content if user_messages else text_value
+
+    # Log conversation context
+    logger.info(f"Processing query: {query}")
+    logger.info(f"Total messages in conversation: {len(messages)}")
+    logger.info(f"User messages: {len(user_messages)}, Assistant messages: {len([m for m in messages if m.role == 'assistant'])}")
+
+    # Build conversation context for generation (last N turns)
+    conversation_context = build_conversation_context(messages, max_turns=MAX_TURNS, max_chars=MAX_CHARS)
 
     full_response = ""
     sources_collected = None

@@ -190,14 +180,10 @@ async def chatui_file_adapter(data):
         query = user_messages[-1].content if user_messages else text_value
 
         logger.info(f"Processing query: {query}")
-        logger.info(f"Total messages: {len(messages)}")
+        logger.info(f"Total messages in conversation: {len(messages)}")
+        logger.info(f"User messages: {len(user_messages)}, Assistant messages: {len([m for m in messages if m.role == 'assistant'])}")
 
         conversation_context = build_conversation_context(messages, max_turns=MAX_TURNS, max_chars=MAX_CHARS)
-    else:
-        query = text_value
-        conversation_context = None
-        logger.info(f"✗ Using legacy text field")
-        logger.info(f"Processing query: {query[:100]}...")
 
     file_content = None
     filename = None
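
Note: for context, below is a small standalone sketch of the normalization pattern chatui_adapter relies on above. Dict messages from ChatUI are wrapped in throwaway objects so the rest of the adapter can use uniform attribute access, and the latest user message becomes the query. The sample payload is invented for illustration; only the field names mirror the diff.

# Standalone sketch of the dict-to-object normalization from chatui_adapter.
# The sample payload is hypothetical; only the field names mirror the diff.
raw_messages = [
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": "Hi, how can I help?"},
    {"role": "user", "content": "Summarise our conversation so far."},
]

messages = []
for msg in raw_messages:
    if isinstance(msg, dict):
        # Build a lightweight object so downstream code can read .role / .content
        messages.append(type('Message', (), {
            'role': msg.get('role', 'unknown'),
            'content': msg.get('content', ''),
        })())
    else:
        messages.append(msg)

user_messages = [m for m in messages if m.role == 'user']
query = user_messages[-1].content if user_messages else None
print(query)  # -> "Summarise our conversation so far."
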
app/models.py CHANGED

@@ -29,13 +29,11 @@ class Message(BaseModel):
 
 class ChatUIInput(BaseModel):
     """Input model for text-only ChatUI requests"""
-    text: str  # Legacy: full concatenated prompt (for backward compatibility)
     messages: Optional[List[Message]] = None  # Structured conversation history
     preprompt: Optional[str] = None
 
 class ChatUIFileInput(BaseModel):
     """Input model for ChatUI requests with file attachments"""
-    text: str
     files: Optional[List[Dict[str, Any]]] = None
     messages: Optional[List[Message]] = None  # Structured conversation history
     preprompt: Optional[str] = None
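
Note: with the legacy text field dropped, requests are expected to carry the conversation in messages. A minimal sketch of what the trimmed-down model accepts follows; the Message fields (role, content) and the example payload are not shown in this diff and are assumptions.

# Simplified stand-in for the updated app/models.py; the real Message model may
# define more fields or validators, so treat this as an illustration only.
from typing import List, Optional
from pydantic import BaseModel

class Message(BaseModel):
    role: str      # assumed field
    content: str   # assumed field

class ChatUIInput(BaseModel):
    """Input model for text-only ChatUI requests"""
    messages: Optional[List[Message]] = None  # Structured conversation history
    preprompt: Optional[str] = None

# A payload without the old `text` key now validates cleanly.
payload = {
    "messages": [
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi, how can I help?"},
        {"role": "user", "content": "What changed in this release?"},
    ],
    "preprompt": "You are a helpful assistant.",
}
data = ChatUIInput(**payload)
print(data.messages[-1].content)
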
app/utils.py CHANGED

@@ -123,7 +123,7 @@ def merge_state(base_state: GraphState, updates: dict) -> GraphState:
 
 def build_conversation_context(messages, max_turns: int = 3, max_chars: int = 8000) -> str:
     """
-    Build conversation context from structured messages.
+    Build conversation context from structured messages to send to generator.
     Always keeps the first user and assistant messages, plus the last N turns.
 
     A "turn" is one user message + following assistant response.

@@ -161,7 +161,6 @@ def build_conversation_context(messages, max_turns: int = 3, max_chars: int = 8000) -> str:
             context_parts.append(msg_text)
             char_count += msg_chars
             msgs_included += 1
-            logger.debug(f"Added first USER message ({msg_chars} chars)")
 
     if first_assistant_msg:
         msg_text = f"ASSISTANT: {first_assistant_msg.content}"

@@ -170,7 +169,6 @@ def build_conversation_context(messages, max_turns: int = 3, max_chars: int = 8000) -> str:
             context_parts.append(msg_text)
             char_count += msg_chars
             msgs_included += 1
-            logger.debug(f"Added first ASSISTANT message ({msg_chars} chars)")
 
     # Collect last N complete turns (user + assistant pairs)
     # Find the last N user messages and their corresponding assistant responses

@@ -223,12 +221,10 @@ def build_conversation_context(messages, max_turns: int = 3, max_chars: int = 8000) -> str:
             msgs_included += 1
 
         turn_count += 1
-        logger.debug(f"Added turn {turn_count}: user + assistant messages")
 
     # Add recent messages to context
     context_parts.extend(recent_messages)
 
     context = "\n\n".join(context_parts)
-    logger.info(f"Built conversation context: {turn_count} recent user turns, {msgs_included} total messages, {char_count} chars")
 
     return context
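
Note: the docstring above is the contract for the truncation: keep the first user/assistant exchange, add the last max_turns user turns with their assistant replies, and stay under max_chars. Below is an independent, simplified sketch of that rule over plain dict messages; it is not a copy of the repo's build_conversation_context, and the example conversation is invented.

# Independent sketch of the truncation rule described in the docstring above.
# It is NOT the repo's implementation; it works on plain dicts for simplicity.
def sketch_conversation_context(messages, max_turns=3, max_chars=8000):
    user_idx = [i for i, m in enumerate(messages) if m["role"] == "user"]
    keep = set()

    # Always keep the first user message and the first assistant reply after it
    if user_idx:
        keep.add(user_idx[0])
        for i in range(user_idx[0] + 1, len(messages)):
            if messages[i]["role"] == "assistant":
                keep.add(i)
                break

    # Keep the last N user turns: each user message plus its following reply
    for u in user_idx[-max_turns:]:
        keep.add(u)
        if u + 1 < len(messages) and messages[u + 1]["role"] == "assistant":
            keep.add(u + 1)

    # Assemble in order, respecting the character budget
    parts, used = [], 0
    for i in sorted(keep):
        text = f"{messages[i]['role'].upper()}: {messages[i]['content']}"
        if used + len(text) > max_chars:
            break
        parts.append(text)
        used += len(text)
    return "\n\n".join(parts)

# Invented five-turn conversation; with max_turns=2 the context keeps turn 1
# (the first exchange) plus turns 4 and 5 (the two most recent).
convo = []
for n in range(1, 6):
    convo.append({"role": "user", "content": f"question {n}"})
    convo.append({"role": "assistant", "content": f"answer {n}"})
print(sketch_conversation_context(convo, max_turns=2))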