matt HOFFNER committed on
Commit
f616dbf
·
1 Parent(s): be1e35a
Files changed (2) hide show
  1. src/app/search/web/page.jsx +28 -29
  2. src/pages/api/llm.js +7 -6
src/app/search/web/page.jsx CHANGED
@@ -4,7 +4,6 @@ import { MemoizedReactMarkdown } from '../../../components/MemoizedReactMarkdown
4
 
5
  export default function WebSearchPage({ searchParams }) {
6
  const [aiResponse, setAiResponse] = useState("");
7
- const [eventSource, setEventSource] = useState(null);
8
  const [searchTerm, setSearchTerm] = useState()
9
 
10
  useEffect(() => {
@@ -12,35 +11,35 @@ export default function WebSearchPage({ searchParams }) {
12
  }, [searchParams])
13
 
14
  useEffect(() => {
15
-
16
- // No need to make a fetch request. Directly open the EventSource connection.
17
- const es = new EventSource(`/api/llm?question=${searchTerm || "Seattle activities this weekend"}`);
18
- console.log(es);
19
- setEventSource(es);
20
-
21
- return () => {
22
- if (es) es.close(); // Close the EventSource when the component is unmounted.
23
- };
24
- }, [searchParams, searchTerm]);
25
-
26
- // Add event listener for the EventSource
27
- useEffect(() => {
28
- if (eventSource) {
29
- eventSource.onmessage = (event) => {
30
- setAiResponse(prev => prev + event.data);
31
- };
32
-
33
- eventSource.onerror = (event) => {
34
- console.error("EventSource failed:", event);
35
- };
 
 
36
  }
37
-
38
- return () => {
39
- if (eventSource) {
40
- eventSource.close();
41
- }
42
- };
43
- }, [eventSource]);
44
 
45
  console.log(aiResponse);
46
 
 
4
 
5
  export default function WebSearchPage({ searchParams }) {
6
  const [aiResponse, setAiResponse] = useState("");
 
7
  const [searchTerm, setSearchTerm] = useState()
8
 
9
  useEffect(() => {
 
11
  }, [searchParams])
12
 
13
  useEffect(() => {
14
+ const decoder = new TextDecoder();
15
+ let responseText = '';
16
+ const controller = new AbortController();
17
+ const signal = controller.signal;
18
+
19
+ async function fetchData() {
20
+ const response = await fetch(`/api/llm`, {
21
+ method: 'POST',
22
+ headers: { 'Content-Type': 'application/json' },
23
+ body: JSON.stringify({ question: searchTerm || "Seattle activities this weekend" }),
24
+ signal,
25
+ });
26
+ const reader = response.body.getReader();
27
+
28
+ reader.read().then(function processText({ done, value }) {
29
+ if (done) {
30
+ console.log("Stream complete");
31
+ return;
32
+ }
33
+ responseText += decoder.decode(value);
34
+ setAiResponse(responseText);
35
+ return reader.read().then(processText);
36
+ });
37
  }
38
+
39
+ fetchData();
40
+
41
+ return () => controller.abort();
42
+ }, [searchParams, searchTerm]);
 
 
43
 
44
  console.log(aiResponse);
45
 
src/pages/api/llm.js CHANGED
@@ -1,17 +1,18 @@
1
  import { GoogleCustomSearch } from "openai-function-calling-tools";
2
  import { LLMError, LLMStream } from './stream';
3
 
4
- const handler = async (req) => {
5
  try {
6
  const googleCustomSearch = new GoogleCustomSearch({
7
  apiKey: process.env.API_KEY,
8
  googleCSEId: process.env.CONTEXT_KEY
9
  });
10
 
 
11
  const messages = [
12
  {
13
  role: "user",
14
- content: req.query.question
15
  },
16
  ];
17
 
@@ -25,15 +26,15 @@ const handler = async (req) => {
25
  const stream = await LLMStream({ id: "gpt-3.5-turbo-0613" }, promptToSend, 0.8, messages, functions);
26
  console.log(stream);
27
 
28
- return new Response(stream);
29
  } catch (error) {
30
  console.error(error);
31
  if (error instanceof LLMError) {
32
- return new Response('Error', { status: 500, statusText: error.message });
33
  } else {
34
- return new Response('Error', { status: 500 });
35
  }
36
  }
37
  };
38
 
39
- export default handler;
 
1
  import { GoogleCustomSearch } from "openai-function-calling-tools";
2
  import { LLMError, LLMStream } from './stream';
3
 
4
+ const handler = async (req, res) => {
5
  try {
6
  const googleCustomSearch = new GoogleCustomSearch({
7
  apiKey: process.env.API_KEY,
8
  googleCSEId: process.env.CONTEXT_KEY
9
  });
10
 
11
+ const requestBody = JSON.parse(req.body);
12
  const messages = [
13
  {
14
  role: "user",
15
+ content: requestBody.question
16
  },
17
  ];
18
 
 
26
  const stream = await LLMStream({ id: "gpt-3.5-turbo-0613" }, promptToSend, 0.8, messages, functions);
27
  console.log(stream);
28
 
29
+ res.status(200).send(stream);
30
  } catch (error) {
31
  console.error(error);
32
  if (error instanceof LLMError) {
33
+ res.status(500).send({ error: error.message });
34
  } else {
35
+ res.status(500).send({ error: 'An error occurred' });
36
  }
37
  }
38
  };
39
 
40
+ export default handler;