matt HOFFNER committed
Commit 2016044 · 1 Parent(s): f6091f7
Files changed (1)
  1. src/pages/api/llm.js +42 -32
src/pages/api/llm.js CHANGED
@@ -1,9 +1,8 @@
 import { Configuration, OpenAIApi } from "openai";
 import { GoogleCustomSearch } from "openai-function-calling-tools";
 
-export default async function handler(req, res) {
+export default function handler(req, res) {
   if (req.method !== 'POST') {
-    // Handle any other HTTP method
     res.status(405).send({ error: 'Method Not Allowed', method: req.method });
     return;
   }
@@ -46,36 +45,47 @@ export default async function handler(req, res) {
 
     return response;
   };
-
-  let response;
-
-  while (true) {
-    response = await getCompletion(messages);
-
-    if (response.data.choices[0].finish_reason === "stop") {
-      res.status(200).json({ result: response.data.choices[0].message.content });
-      break;
-    } else if (response.data.choices[0].finish_reason === "function_call") {
-      const fnName = response.data.choices[0].message.function_call.name;
-      const args = response.data.choices[0].message.function_call.arguments;
-
-      const fn = functions[fnName];
-      const result = await fn(...Object.values(JSON.parse(args)));
-
-      messages.push({
-        role: "assistant",
-        content: "",
-        function_call: {
-          name: fnName,
-          arguments: args,
-        },
-      });
-
-      messages.push({
-        role: "function",
-        name: fnName,
-        content: JSON.stringify({ result: result }),
-      });
-    }
-  }
+
+  res.setHeader('Content-Type', 'text/event-stream');
+  res.setHeader('Cache-Control', 'no-cache');
+  res.setHeader('Connection', 'keep-alive');
+
+  const processCompletions = async () => {
+    let response;
+
+    while (true) {
+      response = await getCompletion(messages);
+
+      if (response.data.choices[0].finish_reason === "stop") {
+        res.write(`data: ${JSON.stringify({ result: response.data.choices[0].message.content })}\n\n`);
+        break;
+      } else if (response.data.choices[0].finish_reason === "function_call") {
+        const fnName = response.data.choices[0].message.function_call.name;
+        const args = response.data.choices[0].message.function_call.arguments;
+
+        const fn = functions[fnName];
+        const result = await fn(...Object.values(JSON.parse(args)));
+
+        messages.push({
+          role: "assistant",
+          content: "",
+          function_call: {
+            name: fnName,
+            arguments: args,
+          },
+        });
+
+        messages.push({
+          role: "function",
+          name: fnName,
+          content: JSON.stringify({ result: result }),
+        });
+      }
+    }
+  };
+
+  processCompletions().catch(err => {
+    console.error(err);
+    res.status(500).send({ error: 'Internal Server Error' });
+  });
 }
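
Note on consuming the new endpoint (not part of this commit): the handler now writes Server-Sent-Events-style frames ("data: {...}" followed by a blank line) with text/event-stream headers instead of returning a single JSON response, so a caller reads the body incrementally. A minimal client-side sketch, assuming the route is served at /api/llm and accepts a JSON body of the shape { messages } (the request parsing happens outside this hunk, so both are assumptions):

// Hypothetical client for the streaming route; names and body shape are assumed.
async function readCompletion(messages) {
  const res = await fetch("/api/llm", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages }),
  });
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);

  // EventSource only supports GET, so parse the SSE frames by hand.
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // Each SSE frame ends with a blank line ("\n\n").
    const frames = buffer.split("\n\n");
    buffer = frames.pop(); // keep any partial frame for the next chunk

    for (const frame of frames) {
      if (!frame.startsWith("data: ")) continue;
      const { result } = JSON.parse(frame.slice("data: ".length));
      return result; // the handler writes one frame once finish_reason is "stop"
    }
  }
}

Because the result is flushed with res.write as soon as the function-calling loop reaches finish_reason === "stop", the client can surface it without waiting for the connection to close.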