# %% __CONDA_RUN_ARGS__ = ["--no-capture-output"]
"""
分时图实时数据获取器对象
    用于获取分时图成交数据
    分时图的数据由服务器每隔3s通过sse推送一次
    sse的链接不能重复像常规的get请求一样频繁发送到服务器，只需要建立双方管道就可以
    客户端不断接收数据即可
    conda会缓存子进程的输出，需要设置
"""
import json
import threading
import requests
import time
import logging
from QFinanceGridModel.base import Url, Headers  # 确保这个模块存在

# Configure logging: INFO level, timestamped records, emitted to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler()
    ]
)
# Module-wide logger shared by the acquirer class and the demo entry point.
logger = logging.getLogger('RealTimeAcquirer')


class RealTimeAcquirer:
    """Streams per-tick trade data for a single stock over a long-lived SSE connection.

    The server pushes a snapshot roughly every 3 seconds over one streaming
    GET request (Server-Sent Events); we keep that single connection open and
    read from it, reconnecting with a fixed back-off whenever it drops.
    """

    def __init__(self, secid) -> None:
        """Initialize the acquirer.

        Args:
            secid: market-qualified stock id in the form "<market>.<code>",
                where market "0" = Shenzhen exchange and "1" = Shanghai exchange.
        """
        self.secid = secid
        self.market, self.code = secid.split('.')
        self.running = False
        self.monitor_thread = None          # background SSE reader thread (set by start)
        self.data_lock = threading.Lock()   # guards historical_ticks across threads
        self.historical_ticks = []          # accumulated parsed tick dicts
        self.reconnect_interval = 5         # seconds to wait before reconnecting
        self.timeout = 30                   # HTTP request timeout in seconds
        self.last_data_time = None          # timestamp string of the newest tick

    def start_get_realtime_data(self):
        """Start the background streaming thread (no-op if already running)."""
        if self.running:
            logger.info(f"[{self.code}] 监控已在运行中")
            return
        logger.info(f"[{self.code}] 启动监控线程")
        self.running = True
        self.monitor_thread = threading.Thread(
            target=self.__stream_realtime_stock_data,
            name=f"Acquirer-{self.code}"
        )
        # Daemon thread: it must not keep the process alive on interpreter exit.
        self.monitor_thread.daemon = True
        self.monitor_thread.start()

    def stop_get_realtime_data(self):
        """Signal the streaming thread to stop and wait briefly for it to exit."""
        if not self.running:
            return

        logger.info(f"[{self.code}] 正在停止监控...")
        self.running = False
        if self.monitor_thread is not None and self.monitor_thread.is_alive():
            # The reader may be blocked on the socket until the next heartbeat
            # or timeout, so cap the wait instead of joining indefinitely.
            self.monitor_thread.join(timeout=2)
            logger.info(f"[{self.code}] 监控已停止")

    def __stream_realtime_stock_data(self):
        """Core loop: open the SSE connection and consume the event stream.

        Runs until ``self.running`` is cleared. On any request failure the
        loop sleeps ``reconnect_interval`` seconds and reconnects.
        """
        while self.running:
            try:
                url = str(Url(type="5", secid=self.secid).base_url)
                headers = Headers(type="5").headers
                logger.info(f"[{self.code}] 正在连接SSE服务器")

                # stream=True keeps the response open so iter_lines yields
                # each server-pushed event as it arrives.
                with requests.get(url, headers=headers, stream=True, timeout=self.timeout) as response:
                    response.raise_for_status()
                    logger.info(f"[{self.code}] SSE连接成功，开始接收数据流")

                    for line in response.iter_lines():
                        if not self.running:
                            logger.info(f"[{self.code}] 监控已停止，退出循环")
                            break

                        # Blank lines are SSE keep-alive heartbeats; skip them
                        # without treating the stream as closed.
                        if not line:
                            continue

                        try:
                            decoded_line = line.decode('utf-8')
                            logger.debug(f"收到原始数据: {decoded_line}")

                            if decoded_line.startswith('data:'):
                                json_str = decoded_line[5:].strip()
                                data = json.loads(json_str)

                                details = (data.get("data") or {}).get("details")
                                # Require at least 5 comma-separated fields so the
                                # indexing below cannot raise on a short record.
                                if details and len(details[-1].split(",")) >= 5:
                                    # Only the newest entry is processed; each entry
                                    # is "time,price,volume,...,direction".
                                    tick_data = details[-1].split(",")
                                    self.last_data_time = tick_data[0]
                                    # NOTE(review): assumes direction code '2' means buy and
                                    # every other code is labeled sell — confirm with feed spec.
                                    logger.info(f"--时间 {tick_data[0]} -- 价格 {tick_data[1]} -- 成交量 {tick_data[2]} -- 方向 {'买入' if tick_data[4] == '2' else '卖出'} ")

                                    # Append under the lock: consumers may read
                                    # historical_ticks from another thread.
                                    with self.data_lock:
                                        self.historical_ticks.append({
                                            "time": tick_data[0],
                                            "price": float(tick_data[1]),
                                            "volume": int(tick_data[2]),
                                            "direction": tick_data[4]
                                        })
                                else:
                                    logger.info(f"收到无有效数据的内容: {data}")
                            else:
                                logger.info(f"收到非数据行: {decoded_line}")

                        except json.JSONDecodeError as e:
                            logger.error(f"JSON解析失败: {e}\n原始数据: {decoded_line}")
                        except Exception as e:
                            logger.error(f"数据处理异常: {str(e)}", exc_info=True)

            except requests.exceptions.RequestException as e:
                logger.error(f"[{self.code}] 请求异常: {str(e)}，{self.reconnect_interval}秒后重连...")
            except Exception as e:
                logger.error(f"[{self.code}] 未知错误: {str(e)}", exc_info=True)

            # Back off before reconnecting (skipped when we were asked to stop).
            if self.running:
                logger.info(f"[{self.code}] 连接断开，{self.reconnect_interval}秒后重连...")

                time.sleep(self.reconnect_interval)


if __name__ == "__main__":
    # Example usage: stream ticks for one Shanghai-listed stock.
    logger.info("程序启动...")
    acquirer = RealTimeAcquirer("1.601288")
    acquirer.start_get_realtime_data()

    try:
        # Keep the main thread alive and emit a heartbeat status line
        # roughly every 30 seconds.
        next_report = time.time() + 30
        while True:
            now = time.time()
            if now > next_report:
                logger.info(f"[主线程] 程序运行中... 最后数据时间: {acquirer.last_data_time}")
                next_report = now + 30
            time.sleep(1)
    except KeyboardInterrupt:
        logger.info("\n用户中断，停止监控...")
        acquirer.stop_get_realtime_data()
    except Exception as e:
        logger.error(f"主程序异常: {str(e)}", exc_info=True)
    finally:
        logger.info("程序退出")
