#!/usr/bin/env python
# -*- coding:utf-8 -*-
from typing import Tuple

from openai import OpenAI
from openai.types.chat import ChatCompletionMessage
from langchain_openai import ChatOpenAI


class DeepSeekModel:
    """Thin wrapper around LangChain's ChatOpenAI client pointed at the DeepSeek API."""

    # Default generation settings; chat() accepts per-call overrides of these.
    model = 'deepseek-chat'
    max_tokens = 4096
    temperature = 0.8
    base_url = "https://api.deepseek.com"

    def __init__(self, api_key: str):
        """
        :param api_key: DeepSeek API key used to authenticate every request
        """
        self.api_key = api_key
        self.client = ChatOpenAI(model=self.model, api_key=self.api_key, base_url=self.base_url,
                                 max_tokens=self.max_tokens, temperature=self.temperature)

    def chat(self,
             messages: list,
             model: str = 'deepseek-chat',
             stream: bool = False,
             max_tokens: int = None,
             temperature: float = None) -> str:
        """
            Chat with the DeepSeek LLM.
        :param messages: list of messages (LangChain message objects or role/content dicts)
        :param model: model name; overrides the instance default for this call
        :param stream: when True, return the raw response object instead of its text
        :param max_tokens: max tokens for this call; None means use the instance default
        :param temperature: temperature for this call; None means use the instance default
        :return: the response text, or the raw response object when stream=True
        """
        # BUG FIX: the original ignored every override parameter and always
        # used the client configured in __init__. Build a per-call client only
        # when the caller actually overrides something; otherwise reuse the
        # default client to avoid reconstructing it on every call.
        client = self.client
        if (model != self.model
                or max_tokens is not None
                or temperature is not None):
            client = ChatOpenAI(
                model=model,
                api_key=self.api_key,
                base_url=self.base_url,
                max_tokens=self.max_tokens if max_tokens is None else max_tokens,
                temperature=self.temperature if temperature is None else temperature,
            )

        response = client.invoke(
            input=messages,
        )
        if stream:
            # NOTE(review): invoke() is a blocking call, so nothing is actually
            # streamed; the raw response object is returned unchanged here to
            # stay compatible with existing callers that pass stream=True.
            return response

        return response.content

    @classmethod
    def get_response(cls, response, stream=True):
        """
        Extract the payload from a chat() response.
        :param response: the value returned by chat()
        :param stream: True -> index element 1 of the response;
                       False -> return its .content attribute
        """
        if stream:
            # NOTE(review): response[1] assumes a sequence-like response; an
            # AIMessage does not support indexing — verify against the actual
            # streaming caller before relying on this branch.
            return response[1]
        else:
            return response.content
