wuyiqunLu committed
Commit 3ebf44a • 1 Parent(s): 2c2b487

feat: save raw message to db (#63)


<img width="1268" alt="image" src="https://github.com/landing-ai/vision-agent-ui/assets/132986242/83c315e2-2fe5-401f-a046-3292882bb05b">

components/chat/ChatMessage.tsx CHANGED
@@ -11,11 +11,6 @@ import { CodeBlock } from '@/components/ui/CodeBlock';
 import { MemoizedReactMarkdown } from '@/components/chat/MemoizedReactMarkdown';
 import { IconLandingAI, IconUser } from '@/components/ui/Icons';
 import { MessageBase } from '../../lib/types';
-import {
-  Tooltip,
-  TooltipContent,
-  TooltipTrigger,
-} from '@/components/ui/Tooltip';
 import Img from '../ui/Img';
 import { getCleanedUpMessages } from '@/lib/messageUtils';
 import Loading from '../ui/Loading';
@@ -143,14 +138,16 @@ const Markdown: React.FC<{
   );
 };
 
-export function ChatMessage({ message, isLoading }: ChatMessageProps) {
-  const { logs, content } = useMemo(() => {
+export function ChatMessage({
+  message,
+  isLoading,
+}: ChatMessageProps) {
+  const { content } = useMemo(() => {
     return getCleanedUpMessages({
       content: message.content,
       role: message.role,
     });
   }, [message.content, message.role]);
-  console.log('[Ming] logs:', logs);
   console.log('[Ming] content:', content);
   console.log('[Ming] raw:', message.content);
   const [details, setDetails] = useState<string>('');
@@ -172,7 +169,7 @@ export function ChatMessage({ message, isLoading }: ChatMessageProps) {
           {message.role === 'user' ? <IconUser /> : <IconLandingAI />}
         </div>
         <div className="flex-1 px-1 ml-4 space-y-2 overflow-hidden">
-          {logs && <Markdown content={logs} setDetails={setDetails} />}
+          {content && <Markdown content={content} setDetails={setDetails} />}
           {isLoading && <Loading />}
         </div>
         <Dialog open={!!details} onOpenChange={open => !open && setDetails('')}>
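
The net effect on the component, as a hedged sketch (trimmed; `Markdown`, `Loading`, `ChatMessageProps`, and the details Dialog are this file's own local pieces and are assumed unchanged): the hook result no longer carries a separate `logs` field, so the render path collapses to a single markdown block.

```tsx
// Sketch only, not the full component: destructure just `content` and render it once.
const { content } = useMemo(
  () => getCleanedUpMessages({ content: message.content, role: message.role }),
  [message.content, message.role],
);

return (
  <div className="flex-1 px-1 ml-4 space-y-2 overflow-hidden">
    {content && <Markdown content={content} setDetails={setDetails} />}
    {isLoading && <Loading />}
  </div>
);
```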
lib/hooks/useVisionAgent.ts CHANGED
@@ -74,52 +74,10 @@ const useVisionAgent = (chat: ChatWithMessages) => {
       }
     },
     onFinish: async message => {
-      const { logs = '', content, images } = getCleanedUpMessages(message);
-      if (images?.length) {
-        const publicUrls = await Promise.all(
-          images.map((image, index) =>
-            uploadBase64(image, message.id, id ?? 'no-id', index),
-          ),
-        );
-        const newContent = publicUrls.reduce((accum, url, index) => {
-          return accum.replace(
-            generateAnswersImageMarkdown(index, '/loading.gif'),
-            generateAnswersImageMarkdown(index, url),
-          );
-        }, content);
-        const newMessage = {
-          ...message,
-          content: logs + CLEANED_SEPARATOR + newContent,
-        };
-        setMessages([
-          ...messages,
-          /**
-           * A workaround to fix the issue of the messages been stale state when appending a new message
-           * https://github.com/vercel/ai/issues/550#issuecomment-1712693371
-           */
-          ...(input
-            ? [
-                {
-                  id: nanoid(),
-                  role: 'user',
-                  content:
-                    input + '\n\n' + generateInputImageMarkdown(mediaUrl),
-                  createdAt: new Date(),
-                } satisfies Message,
-              ]
-            : []),
-          newMessage,
-        ]);
-        await dbPostCreateMessage(id, {
-          role: newMessage.role as 'user' | 'assistant',
-          content: newMessage.content,
-        });
-      } else {
-        await dbPostCreateMessage(id, {
-          role: message.role as 'user' | 'assistant',
-          content: logs + CLEANED_SEPARATOR + content,
-        });
-      }
+      await dbPostCreateMessage(id, {
+        role: message.role as 'user' | 'assistant',
+        content: message.content,
+      });
     },
     initialMessages: initialMessages,
     body: {
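
With the image-upload and `CLEANED_SEPARATOR` stitching removed, `onFinish` now writes the streamed assistant message to the database verbatim; all formatting happens later, at render time, in `getCleanedUpMessages`. A minimal sketch of the resulting handler, using only the calls visible in this hunk (`dbPostCreateMessage` and `id` come from the surrounding hook):

```ts
onFinish: async message => {
  // Persist the raw streamed content as-is: no logs/content splitting,
  // no placeholder-image rewriting, no setMessages() stale-state workaround.
  await dbPostCreateMessage(id, {
    role: message.role as 'user' | 'assistant',
    content: message.content,
  });
},
```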
lib/messageUtils.ts CHANGED
@@ -67,21 +67,6 @@ const generateJSONArrayMarkdown = (
   return message;
 };
 
-const generateStringArrayMarkdown = (
-  message: string,
-  header: string,
-  payload: Array<string>,
-) => {
-  message += '\n';
-  message += '| ' + header + ' |' + '\n';
-  message += '| ' + ':-' + ' |' + '\n';
-  payload.forEach((tool: string) => {
-    message += '| ' + tool + ' |' + '\n';
-  });
-  message += '\n';
-  return message;
-};
-
 const generateCodeExecutionMarkdown = (
   message: string,
   payload: {
@@ -105,11 +90,7 @@ const generateCodeExecutionMarkdown = (
 
 const generateFinalCodeMarkdown = (
   message: string,
-  payload: {
-    code: string;
-    test: string;
-    result: string;
-  },
+  payload: FinalCodeBody['payload'],
 ) => {
   message += 'Final Code: \n';
   message += `\`\`\`python\n${payload.code}\n\`\`\`\n`;
@@ -187,7 +168,17 @@ type FinalCodeBody = {
   payload: {
     code: string;
     test: string;
-    result: string;
+    result: {
+      logs: {
+        stderr: string[];
+        stdout: string[];
+      };
+      results: Array<{
+        png: string;
+        text: string;
+        is_main_result: boolean;
+      }>;
+    };
   };
 };
 
@@ -246,7 +237,7 @@ const getMessageTitle = (json: MessageBody) => {
       if (json.status === 'completed') {
         return '✅ The vision agent has concluded the chat, the last execution is successful. \n';
       } else {
-        return '❌ he vision agent has concluded the chat, the last execution is failed. \n';
+        return '❌ The vision agent has concluded the chat, the last execution is failed. \n';
       }
     default:
       throw 'Not supported type';
@@ -279,17 +270,10 @@ export const getCleanedUpMessages = ({
 }: Pick<MessageBase, 'role' | 'content'>) => {
   if (role === 'user') {
     return {
-      logs: content,
-    };
-  }
-  if (content.split(CLEANED_SEPARATOR).length === 2) {
-    return {
-      logs: content.split(CLEANED_SEPARATOR)[0],
-      content: content.split(CLEANED_SEPARATOR)[1],
+      content,
     };
   }
-  const [logs = '', answer = ''] = content.split('<ANSWER>');
-  const lines = logs.split('\n');
+  const lines = content.split('\n');
   let formattedLogs = '';
   const jsons: MessageBody[] = [];
   for (let line of lines) {
@@ -313,21 +297,7 @@ export const getCleanedUpMessages = ({
     }
   }
   jsons.forEach(json => (formattedLogs += parseLine(json)));
-  const [answerText, imagesStr = ''] = answer.split('<VIZ>');
-  const [imagesArrayStr, ...rest] = imagesStr.split('</VIZ>');
-  const images = imagesArrayStr
-    .split('</IMG>')
-    .map(str => str.replace('<IMG>', ''))
-    .slice(0, -1);
   return {
-    logs: formattedLogs,
-    content:
-      answerText.replace('</</ANSWER>', '').replace('</ANSWER>', '') +
-      '\n\n' +
-      images
-        .map((_, index) => generateAnswersImageMarkdown(index, '/loading.gif'))
-        .join('') +
-      rest.join(''),
-    images: images,
+    content: formattedLogs,
   };
 };
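
With the `<ANSWER>`/`<VIZ>` post-processing gone, `getCleanedUpMessages` has a much smaller contract: user content passes through untouched, and assistant content is treated as newline-delimited JSON log lines that are parsed and reformatted into markdown. A hedged usage sketch; the raw sample line and its `type` value are hypothetical, only illustrating the shape implied by `FinalCodeBody`:

```ts
import { getCleanedUpMessages } from '@/lib/messageUtils';

// Hypothetical raw assistant message: one JSON log line per agent step,
// now stored verbatim by useVisionAgent's onFinish.
const raw = JSON.stringify({
  type: 'final_code', // assumed discriminator value
  status: 'completed',
  payload: {
    code: 'print("hello")',
    test: '',
    result: { logs: { stderr: [], stdout: ['hello'] }, results: [] },
  },
});

// User messages are returned as-is; assistant messages are rendered to markdown.
const user = getCleanedUpMessages({ role: 'user', content: 'Count the cars' });
const assistant = getCleanedUpMessages({ role: 'assistant', content: raw });
console.log(user.content, assistant.content);
```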
next.config.js CHANGED
@@ -10,6 +10,11 @@ module.exports = {
       },
     ],
   },
+  experimental: {
+    serverActions: {
+      bodySizeLimit: '10mb',
+    },
+  },
   experimental: {
     serverComponentsExternalPackages: ['pino', 'pino-loki'],
   },
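
The resulting `next.config.js` declares `experimental` twice; in a plain JavaScript object literal the later key silently overrides the earlier one, so only the `serverComponentsExternalPackages` block would take effect. A hedged sketch of a single merged block, assuming both settings are meant to apply:

```js
// next.config.js (sketch): one experimental block avoids the duplicate-key override.
module.exports = {
  // ...images config unchanged...
  experimental: {
    serverActions: {
      bodySizeLimit: '10mb',
    },
    serverComponentsExternalPackages: ['pino', 'pino-loki'],
  },
};
```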