
from datetime import date, datetime
from decimal import ROUND_HALF_UP, Decimal, getcontext, localcontext
from typing import Type

import numpy as np
import pandas as pd
import talib  # technical-indicator calculations (MACD / EMA)
from cachetools import TTLCache, cached
from fastapi.concurrency import run_in_threadpool
from redis import Redis
from sqlalchemy.ext.asyncio import AsyncSession

from module_stock.dao.daily_dao import DailyDao
from module_stock.dao.daily_macd_dao import DailyMacdDao
from module_stock.dao.kline_macd_dao import KlineMacdDao
from module_stock.dao.monthly_dao import MonthlyDao
from module_stock.dao.monthly_macd_dao import MonthlyMacdDao
from module_stock.dao.weekly_dao import WeeklyDao
from module_stock.dao.weekly_macd_dao import WeeklyMacdDao
from module_stock.entity.do.daily_macd_do import StockDailyMacd
from module_stock.entity.do.monthly_macd_do import StockMonthlyMacd
from module_stock.entity.do.weekly_macd_do import StockWeeklyMacd
from module_stock.entity.vo.kline_macd_vo import KlineMacdModel
from module_stock.entity.vo.macd_divergence_vo import DivergenceModel
from utils.log_util import logger
from utils.stock_util import CommonUtil

class KlineMacdService():

    macd_fastperiod = 12
    macd_slowperiod = 26
    macd_signalperiod = 9
    macd_compareperiod = 26
    macd_redis_expire = 86400

    daily_macds_cache : pd.DataFrame = pd.DataFrame()
    weekly_macds_cache : pd.DataFrame = pd.DataFrame()
    monthly_macds_cache : pd.DataFrame = pd.DataFrame()

    ttl_cache = TTLCache(maxsize=100000, ttl=macd_redis_expire)

    x_types = [
        {'data_type': 'close','valley_type': 'close', 'crest_type': 'close'},
        {'data_type': 'close','valley_type': 'low', 'crest_type': 'high'},
        {'data_type': 'low','valley_type': 'low', 'crest_type': None},
        {'data_type': 'high','valley_type': None,'crest_type': 'high'}
    ]

    @classmethod  
    async def init_macd_cache(cls, query_db: AsyncSession, kline_period:str=None):  
        if not kline_period:
            cls.daily_macds_cache = await KlineMacdService.get_macds_pd(query_db,None,None,'daily')
            cls.weekly_macds_cache = await KlineMacdService.get_macds_pd(query_db,None,None,'weekly')
            cls.monthly_macds_cache = await KlineMacdService.get_macds_pd(query_db,None,None,'monthly') 
        else :
            match(kline_period):
                case 'daily':
                    cls.daily_macds_cache = await KlineMacdService.get_macds_pd(query_db,None,None,'daily')
                case 'weekly':
                    cls.weekly_macds_cache = await KlineMacdService.get_macds_pd(query_db,None,None,'weekly')
                case 'monthly':
                    cls.monthly_macds_cache = await KlineMacdService.get_macds_pd(query_db,None,None,'monthly')

    @classmethod
    @cached(ttl_cache)
    def get_macds_from_cache(cls, symbol:str, start_date: date, kline_period: str = None, 
                             data_type:str= None,valley_type:str= None, crest_type:str= None):
        df = pd.DataFrame()
        match(kline_period):
            case 'daily':
                df = cls.daily_macds_cache 
            case 'weekly':
                df = cls.weekly_macds_cache 
            case 'monthly':
                df = cls.monthly_macds_cache 
        if symbol:
            df = df.loc[df['symbol'] == symbol]
        if start_date:
            df = df.loc[df['trade_date'] >= start_date]
        if data_type:
            df = df.loc[df['data_type'] == data_type]
        if valley_type:
            df = df.loc[df['valley_type'] == valley_type]
        if crest_type:
            df = df.loc[df['crest_type'] == crest_type]
        return df.to_dict('records')

    @classmethod  
    def clear_macd_cache(cls, kline_period:str=None):  
        if not kline_period:
            cls.daily_macds_cache= pd.DataFrame()
            cls.weekly_macds_cache = pd.DataFrame()
            cls.monthly_macds_cache = pd.DataFrame()
        else :
            match(kline_period):
                case 'daily':
                    cls.daily_macds_cache= pd.DataFrame()
                case 'weekly':
                    cls.weekly_macds_cache = pd.DataFrame()
                case 'monthly':
                    cls.monthly_macds_cache = pd.DataFrame()


    @classmethod
    def get_kline_caches(cls, kline_period:str) :
        match(kline_period):
            case 'daily':
                return cls.daily_macds_cache
            case 'weekly':
                return cls.weekly_macds_cache
            case 'monthly':
                return cls.monthly_macds_cache

    @classmethod
    def remove_macd_cache(cls, symbol: str, kline_period = 'daily'):
        df = cls.get_kline_caches(kline_period)
        if not df.empty:
            df.drop(df[df['symbol'] == symbol].index, inplace=True)

    @classmethod
    def add_macd_cache(cls, add_data: pd.DataFrame, symbol: str, kline_period = 'daily'):
        match(kline_period):
            case 'daily':
                cls.daily_macds_cache = pd.concat([cls.daily_macds_cache, add_data], axis=0)
            case 'weekly':
                cls.weekly_macds_cache = pd.concat([cls.weekly_macds_cache, add_data], axis=0)
            case 'monthly':
                cls.monthly_macds_cache = pd.concat([cls.monthly_macds_cache, add_data], axis=0)

    @classmethod
    async def get_kline_data_pd(cls, query_db: AsyncSession, symbol: str, kline_period:str) :
        match(kline_period):
            case 'daily':
                return  (await DailyDao.get_all_daily_pd(query_db, symbol))
            case 'weekly':
                return  (await WeeklyDao.get_all_weekly_pd(query_db, symbol))
            case 'monthly':
                return  (await MonthlyDao.get_all_monthly_pd(query_db, symbol))
            
    @classmethod
    def get_kline_macd_dao(cls, kline_period:str)  -> Type[KlineMacdDao]:
        match(kline_period):
            case 'daily':
                return  DailyMacdDao
            case 'weekly':
                return  WeeklyMacdDao
            case 'monthly':
                return  MonthlyMacdDao
    
    @classmethod
    def get_kline_macd_do_model(cls, kline_period:str) :
        match(kline_period):
            case 'daily':
                return  StockDailyMacd
            case 'weekly':
                return  StockWeeklyMacd
            case 'monthly':
                return  StockMonthlyMacd
            
    @classmethod
    def del_cur_period_macd(cls, kline_period:str, macds:list) :
        if not macds:
            return
        match(kline_period):
            case 'daily':
                return 
            case 'weekly':
                if CommonUtil.is_date_in_current_week(macds[-1]['trade_date']) :
                    macds.pop()
            case 'monthly':
                if CommonUtil.is_date_in_current_month(macds[-1]['trade_date']) :
                    macds.pop()

    @classmethod
    async def init_macd(cls, query_db: AsyncSession, symbol: str, data_type:str, valley_type:str,
                        crest_type:str, kline_period = 'daily', use_cache : bool = False):
        if use_cache:
            cls.remove_macd_cache(symbol, kline_period)

        # 只存储最近的26条数据
        selected_columns = ['trade_date', 'symbol', 'open','close',
                            'low', 'high', 'ema_12', 'ema_26', 'diff', 'dea', 'macd'] 
        
        transform_colums = ['open','close', 'low', 'high', 'ema_12', 'ema_26', 'diff', 'dea', 'macd']

        """初始化计算MACD指标"""
        stock_data = await cls.get_kline_data_pd(query_db, symbol, kline_period)
        if stock_data.empty :
            return

        result = await run_in_threadpool(cls.calc_macd, data_type, valley_type, crest_type, selected_columns, transform_colums, stock_data)
        if not result:
            return 
        last_25_rows, dict_records = result

        if use_cache:
            cls.add_macd_cache(last_25_rows, symbol, kline_period)
        await cls.get_kline_macd_dao(kline_period).batch_add_macd(query_db, dict_records)
        await query_db.commit()

    @classmethod
    def calc_macd(cls, data_type, valley_type, crest_type, selected_columns, transform_colums, stock_data):
        stock_data['diff'], stock_data['dea'], stock_data['macd'] = talib.MACD(
            stock_data[data_type].astype(np.float64), fastperiod=cls.macd_fastperiod, 
            slowperiod=cls.macd_slowperiod, signalperiod=cls.macd_signalperiod)
        
        stock_data['macd_diff'] = stock_data['macd'].diff().bfill()
        
        stock_data['ema_12'] = talib.EMA(stock_data[data_type], timeperiod=cls.macd_fastperiod)
        stock_data['ema_26'] = talib.EMA(stock_data[data_type], timeperiod=cls.macd_slowperiod)

        stock_data['data_type'] = data_type
        selected_columns.append('data_type')

        if valley_type:
            # 识别价格波谷
            stock_data['price_min'] = stock_data[valley_type].rolling(
                window=cls.macd_compareperiod, min_periods=1).min()
            is_price_new_low = (stock_data[valley_type]
                                <= stock_data['price_min'])  # 当前是否为新低
            # 识别指标波谷
            stock_data['macd_min'] = stock_data['macd'].rolling(
                window=cls.macd_compareperiod, min_periods=1).min()
            is_macd_not_new_low = (
                (stock_data['macd'] > stock_data['macd_min']) & (
                    stock_data['macd'] < 0)
                & (stock_data['macd_diff'] > 0)
            )  # 指标未新低

            stock_data['bottom_divergence'] = np.where(
                (is_price_new_low) & (is_macd_not_new_low),  # 价格新低且指标未新低
                1,  # 信号触发
                0   # 无信号
            )
            stock_data['valley_type'] = valley_type
            selected_columns.append('valley_type')
            selected_columns.append('bottom_divergence')

        if crest_type:
            # 识别价格波峰
            stock_data['price_max'] = stock_data[crest_type].rolling(
                window=cls.macd_compareperiod, min_periods=1).max()
            is_price_new_high = (stock_data[crest_type]
                                >= stock_data['price_max'])  # 当前是否为新低
            
            # 识别指标波峰
            stock_data['macd_max'] = stock_data['macd'].rolling(
                window=cls.macd_compareperiod, min_periods=1).max()
            is_macd_not_new_high = (
                (stock_data['macd'] < stock_data['macd_max']) & (
                    stock_data['macd'] > 0)
                & (stock_data['macd_diff'] < 0)
            )  # 指标未新低

            stock_data['top_divergence'] = np.where(
                (is_price_new_high) & (is_macd_not_new_high),  # 价格新高且指标未新高
                1,  # 信号触发
                0   # 无信号
            )
            stock_data['crest_type'] = crest_type
            selected_columns.append('crest_type')
            selected_columns.append('top_divergence')
        
        last_26_rows = stock_data[selected_columns].tail(26).fillna(0)
        last_26_rows[transform_colums] = last_26_rows[transform_colums].map(
            lambda x: Decimal(str(x)).quantize(Decimal('0.01'), rounding=ROUND_HALF_UP)
        )
        dict_records = last_26_rows.to_dict('records')
        return last_26_rows,dict_records


    @classmethod
    async def detect_realtime_divergence(cls, db: AsyncSession, symbol: str, 
                                        new_data: dict,
                                        redis, kline_period, data_type:str,valley_type:str, crest_type:str):
        """
        检测实时数据是否发生背离
        :param new_data: (date, open, close, low, high)
        """
        try :
            result = await run_in_threadpool(cls.calc_realtime_divergence, symbol, new_data, kline_period, data_type, valley_type, crest_type)
            if not result :
                return
            trade_date, record, bottom_divergence, top_divergence = result

            if bottom_divergence : 
                await redis.set(f'macd:rt:{trade_date}:{kline_period}:bottom_divergence:{symbol}.{data_type}.{valley_type}', record.model_dump_json(), ex=cls.macd_redis_expire) 

            if top_divergence : 
                await redis.set(f'macd:rt:{trade_date}:{kline_period}:top_divergence:{symbol}.{data_type}.{crest_type}', record.model_dump_json(), ex=cls.macd_redis_expire) 


        except Exception as e :
            logger.exception(e)

    @classmethod
    def calc_realtime_divergence(cls, symbol, new_data, kline_period, data_type, valley_type, crest_type):
        macds = cls.get_macds_from_cache(symbol,None, kline_period, data_type, valley_type, crest_type)
        cls.del_cur_period_macd(kline_period, macds)    
        if not macds:            
                # await cls.init_macd(db, symbol, kline_period, True)
                # macds  =   cls.get_macds_from_cache(symbol,None, kline_period)
                # if not macds:   
            logger.error(f"股票:{symbol}周期:{kline_period}的macd数据未初始化")
            return 

        # 准备增量数据
        last_macd_dict = macds[-1]
        last_ema_12 = last_macd_dict['ema_12']
        last_ema_26 = last_macd_dict['ema_26']
        last_dea = last_macd_dict['dea']
        last_macd = last_macd_dict['macd']

        last_9_diffs = [row['diff'] for row in macds[-9:]].copy()

        trade_date = new_data['trade_date']
        open = new_data['open']
        close = new_data['close']
        low = new_data['low']
        high = new_data['high']
                
        # 计算新EMA值
        ema_12 = cls.calc_ema(new_data[data_type], last_ema_12, 12)
        ema_26 = cls.calc_ema(new_data[data_type], last_ema_26, 26)
            
        # 计算DIFF
        diff = ema_12 - ema_26
            
        # 计算DEA
        if len(last_9_diffs) < 9:
            dea = sum(last_9_diffs) / len(last_9_diffs)
        else:
            dea = cls.calc_ema(diff, last_dea, 9)
            
        # 计算MACD柱
        macd = diff - dea
                        
        # 准备记录
        record = KlineMacdModel()
        record.symbol = symbol
        record.trade_date = trade_date
        record.open = open
        record.close = close
        record.low = low
        record.high = high
        record.ema_12 = ema_12
        record.ema_26 = ema_26
        record.diff = diff
        record.dea = dea
        record.macd = macd
        record.kline_period = kline_period
        record.data_type = data_type
        record.valley_type = valley_type
        record.crest_type = crest_type
            
        bottom_divergence = False
        # 检测底背离
        if valley_type:
            is_price_new_low = True if new_data[valley_type] <= min([row[valley_type]  for row in macds]) else False
            is_macd_not_new_low = True if (macd > last_macd) & (macd < 0) & (macd > min([row['macd'] for row in macds])) else False
            bottom_divergence = is_price_new_low & is_macd_not_new_low
            record.bottom_divergence = bottom_divergence  

        top_divergence = False
        # 检测顶背离
        if crest_type:
            is_price_new_high = True if new_data[crest_type] >= max([row[crest_type]  for row in macds]) else False
            is_macd_not_new_high = True if (macd < last_macd) & (macd > 0) & (macd < max([row['macd'] for row in macds])) else False
            top_divergence = is_price_new_high & is_macd_not_new_high
            record.top_divergence = top_divergence
        return trade_date,record,bottom_divergence,top_divergence
    
    @classmethod
    def calc_ema(cls, current_price: Decimal, last_ema: Decimal, period: int):
        """计算指数移动平均"""
        if last_ema is None:
            return current_price
        # 设置上下文精度和舍入规则
        getcontext().prec = 10  # 足够覆盖2位小数计算
        getcontext().rounding = ROUND_HALF_UP  # 银行家舍入
        
        # 转换输入确保2位小数
        last_ema = last_ema.quantize(Decimal('0.00'))
        current_price = current_price.quantize(Decimal('0.00'))
        
        # 计算平滑系数（保留6位中间精度）
        alpha = (Decimal(2) / (Decimal(period) + Decimal(1))).quantize(Decimal('0.000000'))
        
        # 计算新EMA（中间结果保留2位）
        term1 = (current_price * alpha).quantize(Decimal('0.00'))
        term2 = (last_ema * (Decimal(1) - alpha)).quantize(Decimal('0.00'))
        new_ema = (term1 + term2).quantize(Decimal('0.00'))
        
        return new_ema
    
    @classmethod
    async def clear_macds(cls, db: AsyncSession, kline_period: str):
        await cls.get_kline_macd_dao(kline_period).clear_macd_dao(db)
        await db.commit()     

    @classmethod
    async def get_macds(cls, db: AsyncSession, symbol:str, start_date: date, kline_period: str):
        result = await cls.get_kline_macd_dao(kline_period).get_macds(db, symbol, start_date)
        return result
    
    @classmethod
    async def get_macds_pd(cls, db: AsyncSession, symbol:str, start_date: date, kline_period: str):
        result = await cls.get_kline_macd_dao(kline_period).get_macds_pd(db, symbol, start_date)
        return result
    
    @classmethod
    async def get_realtime_divergences(cls, redis:Redis):
        today = datetime.now().date()
        divergenceModel = DivergenceModel()
        today_keys = await redis.keys(f'macd:rt:{today}*')
        if today_keys :
            today_values =  await redis.mget(today_keys)
            modes = [KlineMacdModel.model_validate_json(today_value) for today_value in today_values]

            divergenceModel.daily_bottoms += [model for model in modes if model.kline_period == 'daily' and model.bottom_divergence == True]
            divergenceModel.weekly_bottoms += [model for model in modes if model.kline_period == 'weekly' and model.bottom_divergence == True]
            divergenceModel.monthly_bottoms += [model for model in modes if model.kline_period == 'monthly' and model.bottom_divergence == True]
            divergenceModel.daily_tops +=  [model for model in modes if model.kline_period == 'daily' and model.top_divergence == True]
            divergenceModel.weekly_tops += [model for model in modes if model.kline_period == 'weekly' and model.top_divergence == True]
            divergenceModel.monthly_tops += [model for model in modes if model.kline_period == 'monthly' and model.top_divergence == True]
            
        return divergenceModel