import os

from openai import OpenAI


class Qwen2ChatModel:
    """Minimal chat client for an OpenAI-compatible completions endpoint.

    Wraps the ``openai`` SDK, sending a fixed system prompt plus one user
    message per call. Works with any server that speaks the OpenAI chat
    protocol (e.g. vLLM serving Qwen models).
    """

    def __init__(self, model_name, system_prompt=None, base_url=None, api_key=None,
                 min_tokens=1024, max_tokens=1024, temperature=0.1):
        """Configure the client.

        Args:
            model_name: Model identifier passed to the server.
            system_prompt: System message; defaults to a generic assistant prompt.
            base_url: Endpoint URL; falls back to env var OPENAI_BASE_URL.
            api_key: API key; falls back to env var OPENAI_API_KEY.
            min_tokens: Stored for callers but currently NOT sent to the API
                (the standard chat-completions request has no such field).
            max_tokens: Upper bound on generated tokens per request.
            temperature: Sampling temperature.
        """
        self.base_url = base_url or os.getenv("OPENAI_BASE_URL")
        self.api_key = api_key or os.getenv("OPENAI_API_KEY")
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
        self.model_name = model_name
        self.min_tokens = min_tokens  # NOTE: unused by chat()/chat_stream(); kept for interface compat
        self.max_tokens = max_tokens
        self.temperature = temperature
        self.system_prompt = system_prompt or "You are a helpful assistant."

    def _messages(self, question):
        """Build the two-message payload (system + user) for one question."""
        return [
            {'role': 'system', 'content': self.system_prompt},
            {'role': 'user', 'content': question},
        ]

    def chat(self, question):
        """Send one question and return the full reply as a single string."""
        response = self.client.chat.completions.create(
            model=self.model_name,
            messages=self._messages(question),
            max_tokens=self.max_tokens,
            temperature=self.temperature
        )

        return response.choices[0].message.content

    def chat_stream(self, question):
        """Send one question and yield reply text incrementally as str chunks.

        Yields:
            Non-empty text fragments of the assistant's reply, in order.
        """
        response = self.client.chat.completions.create(
            model=self.model_name,
            messages=self._messages(question),
            max_tokens=self.max_tokens,
            temperature=self.temperature,
            stream=True
        )

        for chunk in response:
            # Streaming responses include role-only and final chunks whose
            # delta.content is None (and some servers send chunks with empty
            # choices). Skip those so consumers always receive str, never None.
            if chunk.choices and chunk.choices[0].delta.content is not None:
                yield chunk.choices[0].delta.content


def test():
    """Smoke-test the wrapper against a local vLLM endpoint.

    Uses chat_stream() so the reply is printed incrementally as it arrives.
    (The previous version iterated the str returned by chat(), which printed
    the reply one character at a time by accident.)
    """
    model = Qwen2ChatModel(model_name="/models/Qwen/QwQ-32B-Preview", base_url="http://10.1.30.3:38000/v1",
                           api_key="123456")
    # chat_stream yields text fragments; flush each one so output appears live.
    for chunk in model.chat_stream("写一个Python快排"):
        print(chunk, end='', flush=True)
