import openai
from .base_model import BaseLLMModel
from .. import shared
from ..config import retrieve_proxy


class OpenAI_Instruct_Client(BaseLLMModel):
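    """Client for OpenAI instruct-style completion models.

    These models are queried through the ``openai.Completion`` endpoint with a
    single plain-text prompt rather than a list of chat messages.
    """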
    def __init__(self, model_name, api_key, user_name="") -> None:
        super().__init__(model_name=model_name, user=user_name)
        self.api_key = api_key

    def _get_instruct_style_input(self):
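        # Collapse the chat history into a single plain-text prompt,
        # separating the turns with blank lines.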
        return "\n\n".join([item["content"] for item in self.history])

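    # Decorated so that shared.state can switch the active API key for this request.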
    @shared.state.switching_api_key
    def get_answer_at_once(self):
        prompt = self._get_instruct_style_input()
        # Issue the request with the configured proxy (if any) applied.
        with retrieve_proxy():
            response = openai.Completion.create(
                api_key=self.api_key,
                api_base=shared.state.openai_api_base,
                model=self.model_name,
                prompt=prompt,
                temperature=self.temperature,
                top_p=self.top_p,
            )
        # Return the generated text and the total token count reported by the API.
        return response.choices[0].text.strip(), response.usage["total_tokens"]
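

# Example usage (illustrative only; the model name, key, and history entry below
# are placeholders, and a valid OpenAI API key is assumed):
#
#   client = OpenAI_Instruct_Client("gpt-3.5-turbo-instruct", api_key="sk-...")
#   client.history = [{"role": "user", "content": "Say hello."}]  # entries need at least a "content" field
#   answer, total_tokens = client.get_answer_at_once()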