import openai


class MessageChatCompletion:
    def __init__(self,
                 model: str = 'gpt-3.5-turbo-0125',
                 message: str = '',
                 api_key: str = '',
                 temperature: float = 0.10,
                 top_p: float = 0.95,
                 n: int = 1,
                 stream: bool = False,
                 stop: str = "\n",
                 max_tokens: int = 4096,
                 presence_penalty: float = 0.0,
                 frequency_penalty: float = 0.0,
                 logit_bias: dict = None,
                 user: str = ''):

        self.api_key = api_key
        openai.api_key = self.api_key

        # Endpoint and headers are kept for reference only; the request in
        # send_message() goes through the openai client rather than raw HTTP.
        if model in ["gpt-4", "gpt-4-turbo-preview", "gpt-3.5-turbo", "gpt-3.5-turbo-0125"]:
            self.endpoint = "https://api.openai.com/v1/chat/completions"
        else:
            self.endpoint = "https://api.openai.com/v1/completions"

        self.headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
        }

        self.prompt = {
            "model": model,
            "messages": [],
            "temperature": temperature,
            "top_p": top_p,
            "n": n,
            "stream": stream,
            "stop": stop,
            "presence_penalty": presence_penalty,
            "frequency_penalty": frequency_penalty
        }

        if max_tokens is not None:
            self.prompt["max_tokens"] = max_tokens

        if logit_bias is not None:
            self.prompt["logit_bias"] = logit_bias

        if user != '':
            self.prompt["user"] = user

        if message != '':
            self.new_user_message(content=message)

        self.response = ''

        self.error = False

    def new_message(self, role: str = 'user', content: str = '', name: str = ''):
        new_message = {"role": role, "content": f"{content}"}
        if name != '':
            new_message['name'] = name

        self.prompt['messages'].append(new_message)

    def new_user_message(self, content: str = '', name: str = ''):
        self.new_message(role='user', content=content, name=name)

    def new_system_message(self, content: str = '', name: str = ''):
        self.new_message(role='system', content=content, name=name)

    def new_assistant_message(self, content: str = '', name: str = ''):
        self.new_message(role='assistant', content=content, name=name)

    def get_last_message(self):
        return self.prompt['messages'][-1]['content']

    def send_message(self):

        try:
            self.error = False

            response = openai.chat.completions.create(
                model=self.prompt['model'],
                messages=self.prompt['messages'],
                frequency_penalty=self.prompt['frequency_penalty'],
                temperature=self.prompt['temperature'],
                max_tokens=self.prompt.get('max_tokens'),
                top_p=self.prompt['top_p'],
                presence_penalty=self.prompt['presence_penalty'],
                stream=self.prompt['stream']
            )

            full_response = response.choices[0].message.content

            # If stream=True, the response is an iterator of chunks instead:
            # full_response = ""
            # for chunk in response:
            #     chunk_message = chunk.choices[0].delta.content
            #     if chunk_message:
            #         full_response += chunk_message

            # Record the reply in the conversation history as an assistant turn.
            self.new_assistant_message(content=full_response)

            self.response = full_response
            return self.response

        except Exception as e:
            self.error = True

            if self.api_key == '' or self.api_key is None:
                self.new_system_message(content="API key is missing")
            else:
                self.new_system_message(content=f"Unable to generate ChatCompletion response\nException: {e}")
            return e
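

# ---------------------------------------------------------------------------
# Usage sketch (not part of the original module): a minimal example of how
# MessageChatCompletion might be driven. The API key and prompt text below
# are placeholders and must be replaced before running.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    chat = MessageChatCompletion(
        model='gpt-3.5-turbo-0125',
        api_key='YOUR_OPENAI_API_KEY',  # placeholder, not a real key
        message='Say hello in one short sentence.',
    )

    reply = chat.send_message()

    if chat.error:
        print(f"Request failed: {reply}")
    else:
        print(f"Assistant reply: {reply}")
        print(f"Last message in history: {chat.get_last_message()}")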