import json
import threading
from json import JSONDecodeError

import requests

from ai_configs import default_ai_configs

# Fallback model configuration used whenever a caller passes ai_config=None.
_chat_default_ai_config = default_ai_configs["huoshan-doubao"]


def ai_chat(user_content, ai_config=None):
  """Single-turn chat with no system prompt.

  Thin convenience wrapper that delegates straight to ai_chat_system.
  """
  return ai_chat_system(user_content, ai_config=ai_config)


def ai_chat_system(user_content, system_prompt=None, ai_config=None):
  """Single-turn chat with an optional system prompt.

  Builds an OpenAI-style message list (system message first, when given)
  and delegates to ai_chat_history.
  """
  messages = []
  # A truthy system_prompt is placed ahead of the user turn.
  if system_prompt:
    messages.append({"role": "system", "content": system_prompt})
  messages.append({"role": "user", "content": user_content})
  return ai_chat_history(messages, ai_config)


def ai_chat_history(histories=None, ai_config=None):
  """Send a full message history to a chat-completion endpoint.

  Args:
    histories: list of OpenAI-style message dicts ({"role": ..., "content": ...}).
    ai_config: dict providing "url", "model" and "key"; falls back to the
      module default when None.

  Returns:
    The assistant reply text (with a surrounding ```json fence stripped when
    present), or None on a non-200 status or any exception.
  """
  if ai_config is None:
    ai_config = _chat_default_ai_config
  try:
    # Request headers. Single quotes inside the f-string keep this valid on
    # Python < 3.12 (nesting the same quote style is a SyntaxError pre-PEP 701)
    # and match the quoting used by chat_stream below.
    headers = {
      "Authorization": f"Bearer {ai_config.get('key')}",
    }
    print(f"调用模型服务：{ai_config['model']}")
    # Send the request.
    response = requests.post(ai_config["url"], headers=headers, json={
      "model": ai_config["model"],
      "messages": histories,
    })
    if response.status_code == 200:
      response_text = response.json()["choices"][0]["message"]["content"]
      if isinstance(response_text, str):
        # Strip a Markdown ```json fence if the model wrapped its answer in one.
        if response_text.startswith("```json") and response_text.endswith("```"):
          response_text = response_text[len("```json"): -3]
      return response_text
    else:
      print(f"API 请求失败，状态码为{response.status_code}")
      return None
  except Exception as e:
    # Broad catch is deliberate: this helper is best-effort and reports
    # failure to callers via None.
    print(f"发生错误！错误信息：{e}")
    return None

class StreamCancellation:
  """Cancellation token for streaming calls.

  Wraps a threading.Event so that one thread can request that another
  thread stop consuming a streamed response.
  """

  def __init__(self):
    # Set exactly once a stop has been requested; never cleared.
    self._cancelled = threading.Event()

  def stop(self):
    """Request that the streaming call stop (idempotent)."""
    self._cancelled.set()
    print("已请求停止流式调用")

  def is_stopped(self):
    """Return True once stop() has been called."""
    return self._cancelled.is_set()

def chat_stream(
  histories,
  ai_config=None,
  on_receiving=None,
  on_finished=None,
  on_thinking=None,
  on_think_start=None,
  on_think_end=None,
  cancellation_token: StreamCancellation = None,
  response_format="text"
):
  """Stream a chat completion, invoking callbacks as SSE chunks arrive.

  Args:
    histories: list of OpenAI-style message dicts.
    ai_config: dict with "url", "model", "key"; module default when None.
    on_receiving: called with (full_text_so_far, new_chunk) per content chunk.
    on_finished: currently never invoked; kept for interface compatibility.
    on_thinking: called with each reasoning_content chunk while "thinking".
    on_think_start / on_think_end: called on the thinking start/stop edges.
    cancellation_token: optional StreamCancellation polled between lines.
    response_format: sent as {"type": response_format} in the payload.

  Returns:
    The accumulated assistant content ("" on error or if cancelled early).
  """
  if ai_config is None:
    ai_config = _chat_default_ai_config

  headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {ai_config['key']}"
  }

  payload = {
    "model": ai_config["model"],
    "messages": histories,
    # OpenAI-style response_format uses a "type" key; the previous
    # {"text": response_format} was not a valid shape.
    "response_format": {"type": response_format},
    "stream": True
  }

  full_content = ""
  try:
    # stream=True is required so requests yields lines as they arrive;
    # without it the whole SSE body is buffered before iter_lines runs.
    with requests.post(ai_config["url"], headers=headers, json=payload,
                       stream=True) as response:
      # 检查响应状态码
      response.raise_for_status()

      # 明确设置响应编码为UTF-8，解决中文乱码问题
      response.encoding = 'utf-8'

      is_thinking = False
      full_content = ""

      # decode_unicode=True makes iter_lines yield str (using the encoding
      # set above) instead of bytes, so the "data: " prefix checks work.
      for line in response.iter_lines(decode_unicode=True):
        # 检查是否请求停止
        if cancellation_token and cancellation_token.is_stopped():
          print("流式调用已停止")
          break
        if line:
          if line.startswith("data: ") and not line.startswith("data: [DONE]"):
            data = json.loads(line[6:])
            # 提取并处理返回的内容（这里假设返回格式为OpenAI API风格）
            # Was `if '' in data`, which never matched and made this dead code.
            if 'choices' in data and len(data['choices']) > 0:
              delta = data['choices'][0].get('delta', {})
              # reasoning_content present => the model is still "thinking".
              current_thinking = "reasoning_content" in delta

              # Rising edge: thinking just started.
              if current_thinking is True and is_thinking is False:
                is_thinking = current_thinking
                if on_think_start is not None:
                  on_think_start()
                  continue
              # Falling edge: thinking just finished.
              if current_thinking is False and is_thinking is True:
                is_thinking = current_thinking
                if on_think_end is not None:
                  on_think_end()
                  continue

              is_thinking = current_thinking

              if is_thinking is True:
                if on_thinking is not None:
                  on_thinking(delta.get('reasoning_content', ''))
                  continue

              content = delta.get('content', '')
              full_content += content
              if on_receiving is not None:
                on_receiving(full_content, content)

  except requests.exceptions.RequestException as e:
    print(f"请求异常：{e}")
  except json.JSONDecodeError as e:
    print(f"JSON解析错误：{e}")
  except Exception as e:
    print(f"发生未知错误：{e}")

  return full_content

# print(ai_chat("请用表格的方式展示过去10年美债利率变化以及当年纳斯达克的涨幅，现在是2025年", default_ai_configs["deepseek-r1"]))
# print(ai_chat("你好，你是谁", default_ai_configs["bailian-qwen-turbo"]))
# print(ai_chat_system("你好你是谁", system_prompt="你是一名专业的喜剧人，需要你按照喜剧人的风格回复用户问题"))

# print(ai_chat_history([
#   {"role": "system", "content": "你是一名专业的喜剧人，需要你按照喜剧人的风格回复用户问题"},
#   {"role": "user", "content": "我是01"},
#   {"role": "user", "content": "我刚刚说了什么"},
# ]))

def default_on_receiving(_, chunk_text):
  """Default on_receiving callback: echo each new chunk to stdout.

  The first argument (the accumulated full text) is ignored; only the
  incremental chunk is printed, unbuffered and without a trailing newline.
  """
  print(chunk_text, end="", flush=True)
