#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2024/5/13 9:56
# @Author : Wangyt
import json
import os
from typing import List, Optional

from loguru import logger
from openai import OpenAI


class DeepSeekChat:
	"""Chat client for an OpenAI-compatible chat-completions endpoint.

	Currently configured for the DashScope (Qwen) compatible-mode API; the
	commented-out presets below switch the same wrapper to the DeepSeek or
	Volcengine endpoints instead.

	NOTE(review): API keys are hardcoded in source. The active key can be
	overridden with the DASHSCOPE_API_KEY environment variable, but the
	hardcoded fallbacks should be rotated and removed from version control.
	"""

	# Only the most recent N history messages are sent with each request.
	_HISTORY_WINDOW = 6

	def __init__(self, llm_id="ep-20250226184914-c495r"):
		# DeepSeek preset (disabled)
		# self.base_url = "https://api.deepseek.com"
		# self.llm_id = "deepseek-chat"
		# self.api_key = "sk-19049cde971641c4a7fee62b15e383c0"
		# Qwen / DashScope preset (active). Env var overrides the baked-in key.
		self.api_key = os.getenv("DASHSCOPE_API_KEY", "sk-6f89326783dd42b3b9c63284ef454cf2")
		self.llm_id = "qwen3-32b"
		self.base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
		# Volcengine preset (disabled) — the `llm_id` argument is only used here.
		# self.base_url = "https://ark.cn-beijing.volces.com/api/v3"
		# self.llm_id = llm_id or "ep-20250226184914-c495r"
		# self.api_key = "4130db91-11f6-44cd-b3bb-71584e47c61d"
		self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
		# Full conversation log; trimmed per-request by deal_history().
		self.history = []

	def deal_history(self, history_list):
		"""Return a copy of the most recent messages.

		The slice makes a new list, so callers may append to the result
		without mutating `history_list`.
		"""
		return history_list[-self._HISTORY_WINDOW:]

	def stream_chat(self, query: str, **kwargs):
		"""Stream a plain-text reply to `query`, yielding content deltas."""
		messages = self.deal_history(self.history)  # recent turns only
		messages.append({"role": "user", "content": query})
		response = self.client.chat.completions.create(
			model=self.llm_id,
			messages=messages,
			stream=True,
			temperature=0.7
		)
		for chunk in response:
			delta = chunk.choices[0].delta.content
			if delta:
				yield delta

	def _json_request_params(self, query: str, temperature: float, top_p: Optional[float]) -> dict:
		"""Build the completion kwargs shared by the JSON streaming methods.

		BUGFIX: the original duplicated branches used `if top_p:` which
		silently discarded a caller-supplied top_p of 0.0; the check is now
		`is not None`.
		"""
		messages = self.deal_history(self.history)  # recent turns only
		messages.append({"role": "user", "content": query})
		params = dict(
			model=self.llm_id,
			messages=messages,
			stream=True,
			temperature=temperature,
			max_tokens=7000,
			response_format={'type': 'json_object'},
			extra_body={"enable_thinking": False},
		)
		if top_p is not None:
			params["top_p"] = top_p
		return params

	def json_stream_chat(self, query: str, temperature: float = 0.3, top_p: Optional[float] = None, **kwargs):
		"""Stream a JSON-mode reply to `query`, yielding content deltas."""
		response = self.client.chat.completions.create(
			**self._json_request_params(query, temperature, top_p)
		)
		for chunk in response:
			delta = chunk.choices[0].delta.content
			if delta:
				yield delta

	async def a_json_stream_chat(self, query: str, temperature: float = 0.3, top_p: Optional[float] = None, **kwargs):
		"""Async-generator variant of json_stream_chat.

		NOTE(review): the underlying client call is synchronous and will
		block the event loop; switch to AsyncOpenAI for true async streaming.
		"""
		response = self.client.chat.completions.create(
			**self._json_request_params(query, temperature, top_p)
		)
		for chunk in response:
			delta = chunk.choices[0].delta.content
			if delta:
				yield delta

	async def async_stream_chat(self, query: str) -> str:
		"""Single-turn, history-free completion; returns the full reply text.

		NOTE(review): despite the name this does NOT stream (stream=False)
		and the client call is synchronous, blocking the event loop.
		"""
		messages = [{"role": "user", "content": query}]
		response = self.client.chat.completions.create(
			model=self.llm_id,
			messages=messages,
			stream=False,
			temperature=0.7,
			extra_body={"enable_thinking": False},
		)
		return response.choices[0].message.content

	def chat(self, query: str, **kwargs):
		"""Blocking single completion; returns the reply text.

		A non-empty list passed as kwargs["history"] is used as the message
		context (and is mutated in place by appending the user turn —
		existing callers rely on this); otherwise recent self.history is used.
		"""
		history = kwargs.get("history") or self.deal_history(self.history)
		history.append({"role": "user", "content": query})
		response = self.client.chat.completions.create(
			model=self.llm_id,
			messages=history,
			temperature=0.7,
			extra_body={"enable_thinking": False},
		)
		msg = response.choices[0].message.content
		logger.info(f"query: {query}, response: {msg}")
		return msg


if __name__ == '__main__':
	# Manual smoke test: one single-turn chat with an explicitly empty history.
	client = DeepSeekChat()
	reply = client.chat("你好，你是谁", history=[])
	print(reply)
