File size: 2,337 Bytes
58974f8
 
 
e1ce828
58974f8
e1ce828
 
 
58974f8
 
 
 
 
 
 
 
 
 
e1ce828
 
 
 
 
 
 
 
 
 
 
 
 
 
58974f8
 
 
 
e1ce828
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58974f8
 
 
 
 
e1ce828
58974f8
e1ce828
 
58974f8
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
from .base_model import BaseModel

import openai
from openai import AsyncOpenAI, OpenAI
from tqdm import tqdm
import asyncio
import os

class GPT4Model(BaseModel):
    """Chat-completion and embedding client backed by the OpenAI API.

    Credentials are read from the ``OPENAI_API_KEY`` and ``OPENAI_API_BASE``
    environment variables on every call (not cached at construction), so they
    can be rotated without recreating the model object. Both variables are
    required; a missing one raises ``KeyError``.
    """

    # The OpenAI embeddings endpoint caps a single request at 2048 inputs.
    _EMBED_BATCH = 2048

    def __init__(self, 
                 generation_model="gpt-4-vision-preview", 
                 embedding_model="text-embedding-ada-002",
                 temperature=0, 
                 max_tokens=1000,
        ) -> None:
        """Configure model names and sampling parameters.

        Args:
            generation_model: chat model used by respond()/respond_async().
            embedding_model: model used by embedding().
            temperature: sampling temperature for chat completions.
            max_tokens: completion length cap (previously hard-coded to 1000).
        """
        self.generation_model = generation_model
        self.embedding_model = embedding_model
        self.temperature = temperature
        self.max_tokens = max_tokens

    @staticmethod
    def _client_config() -> dict:
        # Shared constructor kwargs for OpenAI/AsyncOpenAI clients.
        # Raises KeyError when either variable is unset (fail fast).
        return {
            "api_key": os.environ["OPENAI_API_KEY"],
            "base_url": os.environ["OPENAI_API_BASE"],
        }

    async def respond_async(self, messages: list[dict]) -> str:
        """Return the assistant's reply text for `messages` (async)."""
        client = AsyncOpenAI(**self._client_config())
        output = await client.chat.completions.create(
            messages=messages,
            model=self.generation_model,
            temperature=self.temperature,
            max_tokens=self.max_tokens,
        )
        return output.choices[0].message.content

    def respond(self, messages: list[dict]) -> str:
        """Return the assistant's reply text for `messages` (blocking)."""
        client = OpenAI(**self._client_config())
        output = client.chat.completions.create(
            messages=messages,
            model=self.generation_model,
            temperature=self.temperature,
            max_tokens=self.max_tokens,
        )
        return output.choices[0].message.content

    def embedding(self, texts: list[str]) -> list[list[float]]:
        """Embed `texts`, batching requests to respect the API input cap.

        Returns:
            One embedding vector per input text, in input order.
        """
        client = OpenAI(**self._client_config())
        data = []
        for start in range(0, len(texts), self._EMBED_BATCH):
            # Slicing clamps at the end of the list, so no upper-bound min().
            data += client.embeddings.create(
                input=texts[start:start + self._EMBED_BATCH],
                model=self.embedding_model,
            ).data
        return [d.embedding for d in data]