import { GoogleCustomSearch } from "openai-function-calling-tools";
import { LLMError, LLMStream } from './stream';

const handler = async (req, res) => {
  try {
    console.log(req.body.question);

    const googleCustomSearch = new GoogleCustomSearch({
      apiKey: process.env.API_KEY,
      googleCSEId: process.env.CONTEXT_KEY,
    });

    const messages = [
      {
        role: "user",
        content: req.body.question,
      },
    ];

    const functions = {
      googleCustomSearch,
    };
    console.log(messages);

    const promptToSend = "You are a helpful assistant. A search term is provided, and you are given search results to help you provide a useful response.";
    const stream = await LLMStream({ id: "gpt-3.5-turbo-0613" }, promptToSend, 0.8, messages, functions);

    const decoder = new TextDecoder();
    for await (const chunk of stream) {
      // Write each decoded chunk as it arrives; re-sending the accumulated
      // buffer on every iteration would duplicate earlier output.
      res.write(decoder.decode(chunk));
    }

    return res.end();

  } catch (error) {
    console.error(error);
    // Report the failure through the Node-style `res` object this handler
    // receives; returning a fetch `Response` here would never reach the client.
    res.statusCode = 500;
    if (error instanceof LLMError) {
      return res.end(error.message);
    }
    return res.end('Error');
  }
};

export default handler;
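
For reference, a minimal sketch of calling this handler from the browser, assuming the file is served as a Next.js API route at /api/search (the path and the example question are assumptions, not part of the original code). It reads the streamed body incrementally with the Fetch API:

// Hypothetical client-side usage; the /api/search path is an assumption.
const response = await fetch("/api/search", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ question: "What is the latest Node.js LTS release?" }),
});

// Consume the streamed response chunk by chunk as the handler writes it.
const reader = response.body.getReader();
const decoder = new TextDecoder();
let answer = "";
while (true) {
  const { value, done } = await reader.read();
  if (done) break;
  answer += decoder.decode(value, { stream: true });
}
console.log(answer);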