"""
异步InfluxDB适配器
提供异步的时序数据库操作功能
"""

import asyncio
import logging
from typing import Dict, List, Optional, Any, Union
from datetime import datetime, timedelta
from dataclasses import dataclass
import pandas as pd

try:
    from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
    from influxdb_client import Point, WritePrecision
    from influxdb_client.client.write_api_async import WriteApiAsync
    from influxdb_client.client.query_api_async import QueryApiAsync
    INFLUXDB_AVAILABLE = True
except ImportError:
    INFLUXDB_AVAILABLE = False
    InfluxDBClientAsync = None
    Point = None
    WritePrecision = None
    WriteApiAsync = None
    QueryApiAsync = None

from backend.redfire_framework.monitoring import timer, increment, set_gauge

# Import the DataAdapter base class
from ...core.base import DataAdapter

logger = logging.getLogger(__name__)

@dataclass
class InfluxDBConfig:
    """Connection settings for an InfluxDB 2.x server."""
    url: str
    token: str
    org: str
    bucket: str
    timeout: int = 10000
    verify_ssl: bool = True

    @classmethod
    def from_dict(cls, config: Dict[str, Any]) -> "InfluxDBConfig":
        """Build a config from a plain dict, using defaults for absent keys.

        Unknown keys in *config* are ignored.
        """
        defaults = {
            "url": "",
            "token": "",
            "org": "",
            "bucket": "",
            "timeout": 10000,
            "verify_ssl": True,
        }
        return cls(**{key: config.get(key, value) for key, value in defaults.items()})

class InfluxDBAsyncAdapter(DataAdapter):
    """Asynchronous InfluxDB adapter.

    Wraps the async influxdb-client API for writing and querying market
    data, trade-calendar entries, and index daily bars.  All data methods
    degrade gracefully: when the adapter is not connected (or the client
    library is not installed) they log a warning and return
    False / 0 / None instead of raising.

    Timestamp convention: the Flux queries built here append a literal
    "Z" to ``datetime.isoformat()``, so all datetimes passed in are
    assumed to be naive UTC — an aware datetime would produce an invalid
    range such as ``...+00:00Z``.  TODO(review): confirm callers pass
    naive UTC datetimes.
    """
    
    def __init__(self, config: Union[InfluxDBConfig, Dict[str, Any]]):
        """Store configuration; no network I/O happens here.

        Args:
            config: An ``InfluxDBConfig`` instance, or a plain dict
                accepted by ``InfluxDBConfig.from_dict``.
        """
        # Accept either a plain dict or a ready-made config object.
        if isinstance(config, dict):
            self.config = InfluxDBConfig.from_dict(config)
        else:
            self.config = config
            
        self.client: Optional[InfluxDBClientAsync] = None
        self.write_api: Optional[WriteApiAsync] = None
        self.query_api: Optional[QueryApiAsync] = None
        self._connected = False
        self.logger = logging.getLogger(__name__)
        
        if not INFLUXDB_AVAILABLE:
            logger.warning("InfluxDB client not available. Install influxdb-client[async] to enable time-series functionality")
    
    async def initialize(self) -> None:
        """
        Initialize the adapter (method required by AdapterManager).

        This is an alias for connect(), kept to unify the adapter
        interface.
        """
        await self.connect()
    
    async def connect(self) -> None:
        """Connect to InfluxDB and set up the write/query APIs.

        Raises:
            AdapterConnectionError: if the client library is missing,
                the server health check does not report "pass", or any
                other error occurs while connecting.
        """
        # NOTE(review): the relative depth here (`..core`) differs from
        # the top-of-file `from ...core.base import DataAdapter` — confirm
        # which package level actually contains `core.exceptions`.
        from ..core.exceptions import AdapterConnectionError
        import time
        
        if not INFLUXDB_AVAILABLE:
            raise AdapterConnectionError("InfluxDB client not available. Install influxdb-client[async]")
            
        try:
            start_time = time.time()
            self.client = InfluxDBClientAsync(
                url=self.config.url,
                token=self.config.token,
                org=self.config.org,
                timeout=self.config.timeout,
                verify_ssl=self.config.verify_ssl
            )
            
            # Probe the server before exposing the write/query APIs.
            health = await self.client.health()
            if health.status == "pass":
                self.write_api = self.client.write_api()
                self.query_api = self.client.query_api()
                self._connected = True
                
                duration = time.time() - start_time
                from backend.redfire_framework.monitoring import observe_histogram
                observe_histogram("influxdb_connect_duration", duration)
                
                increment("influxdb_connections_success")
                set_gauge("influxdb_connected", 1)
                logger.info("Successfully connected to InfluxDB")
            else:
                increment("influxdb_connections_failed")
                raise AdapterConnectionError(f"InfluxDB health check failed: {health.status}")
                    
        except AdapterConnectionError:
            # Re-raise our own error untouched (avoid double-wrapping below).
            raise
        except Exception as e:
            logger.error(f"Failed to connect to InfluxDB: {e}")
            increment("influxdb_connections_failed")
            set_gauge("influxdb_connected", 0)
            raise AdapterConnectionError(f"Failed to connect to InfluxDB: {e}")
    
    async def disconnect(self):
        """Close the InfluxDB client connection.

        Errors during close are logged, not raised.
        """
        if self.client:
            try:
                await self.client.close()
                self._connected = False
                set_gauge("influxdb_connected", 0)
                logger.info("Disconnected from InfluxDB")
            except Exception as e:
                logger.error(f"Error disconnecting from InfluxDB: {e}")
    
    async def cleanup(self) -> None:
        """
        Release resources.

        This is an alias for disconnect(), kept to unify the service
        lifecycle-management interface.
        """
        await self.disconnect()
    
    @property
    def is_connected(self) -> bool:
        """True once connect() succeeded and the client still exists."""
        return self._connected and self.client is not None
    
    async def write_market_data(self, symbol: str, data: Dict[str, Any], timestamp: Optional[datetime] = None) -> bool:
        """Write a single market-data point.

        Numeric values in *data* are stored as fields (coerced to
        float); string values become tags.  Other value types are
        silently skipped.

        Args:
            symbol: Instrument symbol, stored as the "symbol" tag.
            data: Field/tag values keyed by name.
            timestamp: Point time; defaults to now (naive UTC).

        Returns:
            True on success, False when disconnected or on any error.
        """
        import time
        if not self.is_connected:
            logger.warning("InfluxDB not connected, cannot write data")
            return False
            
        try:
            start_time = time.time()
            # NOTE(review): datetime.utcnow() is naive UTC (and deprecated
            # since Python 3.12) — matches the "Z"-suffix query convention.
            point = Point("market_data") \
                .tag("symbol", symbol) \
                .time(timestamp or datetime.utcnow(), WritePrecision.MS)
            
            # Route values by type: numbers -> fields, strings -> tags.
            for key, value in data.items():
                if isinstance(value, (int, float)):
                    point = point.field(key, float(value))
                elif isinstance(value, str):
                    point = point.tag(key, value)
            
            await self.write_api.write(
                bucket=self.config.bucket,
                org=self.config.org,
                record=point
            )
            
            duration = time.time() - start_time
            from backend.redfire_framework.monitoring import observe_histogram
            observe_histogram("influxdb_write_duration", duration)
            
            increment("influxdb_writes_success")
            logger.debug(f"Successfully wrote market data for {symbol}")
            return True
                
        except Exception as e:
            logger.error(f"Failed to write market data for {symbol}: {e}")
            increment("influxdb_writes_failed")
            return False
    
    async def write_batch_market_data(self, data_points: List[Dict[str, Any]]) -> int:
        """Write multiple market-data points in one request.

        Each element of *data_points* is a dict with keys:
        ``symbol`` (required; entries without it are skipped),
        ``timestamp`` (optional, defaults to now), ``fields`` (numeric
        values only), and ``tags`` (stringified).

        Returns:
            The number of points written (0 when disconnected, when no
            valid points were found, or on error).
        """
        import time
        if not self.is_connected:
            logger.warning("InfluxDB not connected, cannot write batch data")
            return 0
            
        try:
            start_time = time.time()
            points = []
            
            for data_point in data_points:
                symbol = data_point.get('symbol')
                timestamp = data_point.get('timestamp', datetime.utcnow())
                fields = data_point.get('fields', {})
                tags = data_point.get('tags', {})
                
                if not symbol:
                    continue
                    
                point = Point("market_data") \
                    .tag("symbol", symbol) \
                    .time(timestamp, WritePrecision.MS)
                
                # Attach tags (stringified).
                for key, value in tags.items():
                    point = point.tag(key, str(value))
                
                # Attach numeric fields only; other types are dropped.
                for key, value in fields.items():
                    if isinstance(value, (int, float)):
                        point = point.field(key, float(value))
                
                points.append(point)
            
            if points:
                await self.write_api.write(
                    bucket=self.config.bucket,
                    org=self.config.org,
                    record=points
                )
                
                duration = time.time() - start_time
                from backend.redfire_framework.monitoring import observe_histogram
                observe_histogram("influxdb_batch_write_duration", duration)
                
                increment("influxdb_batch_writes_success")
                increment("influxdb_points_written", len(points))
                logger.info(f"Successfully wrote {len(points)} data points to InfluxDB")
                return len(points)
            else:
                logger.warning("No valid data points to write")
                return 0
                    
        except Exception as e:
            logger.error(f"Failed to write batch market data: {e}")
            increment("influxdb_batch_writes_failed")
            return 0
    
    async def query_market_data(self, symbol: str, start_time: datetime, end_time: Optional[datetime] = None, 
                               fields: Optional[List[str]] = None) -> Optional[pd.DataFrame]:
        """Query market data for *symbol* in a time range.

        Args:
            symbol: Instrument symbol to filter on.
            start_time: Range start (naive UTC assumed).
            end_time: Range end; defaults to now.
            fields: Optional whitelist of field names to return.

        Returns:
            A DataFrame with one row per timestamp (fields pivoted into
            columns, plus a 'time' column), or None when disconnected,
            when no data matched, or on error.
        """
        import time
        if not self.is_connected:
            logger.warning("InfluxDB not connected, cannot query data")
            return None
            
        try:
            start_time_timer = time.time()
            end_time = end_time or datetime.utcnow()
            
            # Build an optional field filter clause for the Flux query.
            field_filter = ""
            if fields:
                field_conditions = [f'r._field == "{field}"' for field in fields]
                field_filter = f'|> filter(fn: (r) => {" or ".join(field_conditions)})'
            
            query = f'''
            from(bucket: "{self.config.bucket}")
            |> range(start: {start_time.isoformat()}Z, stop: {end_time.isoformat()}Z)
            |> filter(fn: (r) => r._measurement == "market_data")
            |> filter(fn: (r) => r.symbol == "{symbol}")
            {field_filter}
            |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
            '''
            
            tables = await self.query_api.query(query, org=self.config.org)
            
            if not tables:
                logger.debug(f"No data found for {symbol} in time range")
                return None
            
            # Flatten Flux tables into a list of row dicts for pandas.
            records = []
            for table in tables:
                for record in table.records:
                    record_dict = record.values.copy()
                    record_dict['time'] = record.get_time()
                    records.append(record_dict)
            
            duration = time.time() - start_time_timer
            from backend.redfire_framework.monitoring import observe_histogram
            observe_histogram("influxdb_query_duration", duration)
            
            if records:
                df = pd.DataFrame(records)
                increment("influxdb_queries_success")
                logger.debug(f"Successfully queried {len(records)} records for {symbol}")
                return df
            else:
                logger.debug(f"No records found for {symbol}")
                return None
                    
        except Exception as e:
            logger.error(f"Failed to query market data for {symbol}: {e}")
            increment("influxdb_queries_failed")
            return None
    
    async def get_latest_data(self, symbol: str, fields: Optional[List[str]] = None) -> Optional[Dict[str, Any]]:
        """Fetch the most recent market-data record for *symbol*.

        Only looks back 24 hours (hard-coded Flux range).

        Args:
            symbol: Instrument symbol to filter on.
            fields: Optional whitelist of field names.

        Returns:
            A dict of the latest record's values plus a 'time' key, or
            None when disconnected, when nothing matched, or on error.
        """
        import time
        if not self.is_connected:
            logger.warning("InfluxDB not connected, cannot get latest data")
            return None
            
        try:
            start_time = time.time()
            field_filter = ""
            if fields:
                field_conditions = [f'r._field == "{field}"' for field in fields]
                field_filter = f'|> filter(fn: (r) => {" or ".join(field_conditions)})'
            
            query = f'''
            from(bucket: "{self.config.bucket}")
            |> range(start: -24h)
            |> filter(fn: (r) => r._measurement == "market_data")
            |> filter(fn: (r) => r.symbol == "{symbol}")
            {field_filter}
            |> last()
            |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
            '''
            
            tables = await self.query_api.query(query, org=self.config.org)
            
            duration = time.time() - start_time
            from backend.redfire_framework.monitoring import observe_histogram
            observe_histogram("influxdb_latest_query_duration", duration)
            
            if tables and tables[0].records:
                record = tables[0].records[0]
                result = record.values.copy()
                result['time'] = record.get_time()
                
                increment("influxdb_latest_queries_success")
                return result
            else:
                logger.debug(f"No latest data found for {symbol}")
                return None
                    
        except Exception as e:
            logger.error(f"Failed to get latest data for {symbol}: {e}")
            increment("influxdb_latest_queries_failed")
            return None
    
    async def health_check(self) -> bool:
        """Ping the server's health endpoint.

        Returns:
            True when connected and the server reports "pass";
            False otherwise (including on exceptions, which are logged).
        """
        if not self.is_connected:
            return False
            
        try:
            health = await self.client.health()
            is_healthy = health.status == "pass"
            set_gauge("influxdb_healthy", 1 if is_healthy else 0)
            return is_healthy
        except Exception as e:
            logger.error(f"InfluxDB health check failed: {e}")
            set_gauge("influxdb_healthy", 0)
            return False
    
    # =================================================================
    # Trade-calendar methods
    # =================================================================
    
    async def write_trade_calendar(self, cal_date: datetime, exchange: str, is_open: bool) -> bool:
        """Write one trade-calendar entry (is_open stored as 1/0).

        Returns True on success, False when disconnected or on error.
        """
        if not self.is_connected:
            logger.warning("InfluxDB not connected, cannot write trade calendar")
            return False
            
        try:
            point = Point("trade_calendar") \
                .tag("exchange", exchange) \
                .field("is_open", 1 if is_open else 0) \
                .time(cal_date, WritePrecision.MS)
            
            await self.write_api.write(
                bucket=self.config.bucket,
                org=self.config.org,
                record=point
            )
            
            logger.debug(f"Wrote trade calendar for {cal_date.date()} {exchange}")
            return True
            
        except Exception as e:
            logger.error(f"Failed to write trade calendar: {e}")
            return False
    
    async def batch_write_trade_calendar(self, calendar_data: List[Dict[str, Any]]) -> int:
        """Write multiple trade-calendar entries in one request.

        Each entry needs 'cal_date' (datetime or 'YYYYMMDD' string;
        other types are skipped), optional 'exchange' (default 'SSE'),
        and a truthy/falsy 'is_open'.

        Returns:
            The number of records written (0 when disconnected, when
            nothing valid was supplied, or on error).
        """
        if not self.is_connected:
            logger.warning("InfluxDB not connected, cannot write trade calendar")
            return 0
            
        try:
            points = []
            for data in calendar_data:
                cal_date = data.get('cal_date')
                if isinstance(cal_date, str):
                    cal_date = datetime.strptime(cal_date, '%Y%m%d')
                elif not isinstance(cal_date, datetime):
                    continue
                    
                point = Point("trade_calendar") \
                    .tag("exchange", data.get('exchange', 'SSE')) \
                    .field("is_open", 1 if data.get('is_open') else 0) \
                    .time(cal_date, WritePrecision.MS)
                points.append(point)
            
            if points:
                await self.write_api.write(
                    bucket=self.config.bucket,
                    org=self.config.org,
                    record=points
                )
                logger.info(f"Batch wrote {len(points)} trade calendar records")
                return len(points)
            return 0
            
        except Exception as e:
            logger.error(f"Failed to batch write trade calendar: {e}")
            return 0
    
    async def check_trading_day(self, cal_date: datetime, exchange: str = 'SSE') -> Optional[bool]:
        """Check whether *cal_date* is a trading day on *exchange*.

        Returns:
            True/False when a calendar record exists for that day;
            None when disconnected, no record exists, or on error
            (callers cannot distinguish "unknown" from "error").
        """
        if not self.is_connected:
            logger.warning("InfluxDB not connected, cannot check trading day")
            return None
            
        try:
            # Query the calendar entry covering this calendar day.
            # NOTE(review): microseconds are not zeroed by these replace()
            # calls, so the range edges inherit cal_date's microsecond value
            # — confirm callers pass midnight-aligned datetimes.
            query = f'''
            from(bucket: "{self.config.bucket}")
            |> range(start: {cal_date.replace(hour=0, minute=0, second=0).isoformat()}Z, 
                     stop: {cal_date.replace(hour=23, minute=59, second=59).isoformat()}Z)
            |> filter(fn: (r) => r._measurement == "trade_calendar")
            |> filter(fn: (r) => r.exchange == "{exchange}")
            |> filter(fn: (r) => r._field == "is_open")
            |> last()
            '''
            
            tables = await self.query_api.query(query, org=self.config.org)
            
            if tables and tables[0].records:
                is_open = tables[0].records[0].get_value()
                return bool(is_open)
            return None
            
        except Exception as e:
            logger.error(f"Failed to check trading day: {e}")
            return None
    
    async def get_last_trading_day(self, before_date: datetime, exchange: str = 'SSE') -> Optional[datetime]:
        """Find the most recent trading day strictly before *before_date*.

        Only searches a 30-day lookback window, so returns None if no
        trading day is recorded in that window (or when disconnected /
        on error).
        """
        if not self.is_connected:
            logger.warning("InfluxDB not connected")
            return None
            
        try:
            # Search trading days within the previous 30 days.
            start_date = before_date - timedelta(days=30)
            query = f'''
            from(bucket: "{self.config.bucket}")
            |> range(start: {start_date.isoformat()}Z, stop: {before_date.isoformat()}Z)
            |> filter(fn: (r) => r._measurement == "trade_calendar")
            |> filter(fn: (r) => r.exchange == "{exchange}")
            |> filter(fn: (r) => r._field == "is_open")
            |> filter(fn: (r) => r._value == 1)
            |> last()
            '''
            
            tables = await self.query_api.query(query, org=self.config.org)
            
            if tables and tables[0].records:
                return tables[0].records[0].get_time()
            return None
            
        except Exception as e:
            logger.error(f"Failed to get last trading day: {e}")
            return None
    
    # =================================================================
    # Index-data methods
    # =================================================================
    
    async def write_index_daily_data(self, ts_code: str, data: Dict[str, Any]) -> bool:
        """Write one index daily bar.

        *data* must contain 'trade_date' (datetime or 'YYYYMMDD'
        string); the OHLCV-style numeric fields listed below are stored
        when present and numeric.

        Returns True on success, False on invalid input, when
        disconnected, or on error.
        """
        if not self.is_connected:
            logger.warning("InfluxDB not connected, cannot write index data")
            return False
            
        try:
            trade_date = data.get('trade_date')
            if isinstance(trade_date, str):
                trade_date = datetime.strptime(trade_date, '%Y%m%d')
            elif not isinstance(trade_date, datetime):
                logger.error(f"Invalid trade_date: {trade_date}")
                return False
            
            point = Point("index_daily") \
                .tag("ts_code", ts_code) \
                .time(trade_date, WritePrecision.MS)
            
            # Store only the known numeric bar fields.
            fields = ['open', 'high', 'low', 'close', 'pre_close', 'change', 'pct_chg', 'vol', 'amount']
            for field in fields:
                value = data.get(field)
                if value is not None and isinstance(value, (int, float)):
                    point = point.field(field, float(value))
            
            await self.write_api.write(
                bucket=self.config.bucket,
                org=self.config.org,
                record=point
            )
            
            logger.debug(f"Wrote index daily data for {ts_code} on {trade_date.date()}")
            return True
            
        except Exception as e:
            logger.error(f"Failed to write index daily data: {e}")
            return False
    
    async def batch_write_index_daily_data(self, ts_code: str, data_list: List[Dict[str, Any]]) -> int:
        """Write multiple index daily bars for one ts_code.

        Entries with an unparsable 'trade_date' are skipped.

        Returns:
            The number of records written (0 when disconnected, when
            nothing valid was supplied, or on error).
        """
        if not self.is_connected:
            logger.warning("InfluxDB not connected")
            return 0
            
        try:
            points = []
            for data in data_list:
                trade_date = data.get('trade_date')
                if isinstance(trade_date, str):
                    trade_date = datetime.strptime(trade_date, '%Y%m%d')
                elif not isinstance(trade_date, datetime):
                    continue
                
                point = Point("index_daily") \
                    .tag("ts_code", ts_code) \
                    .time(trade_date, WritePrecision.MS)
                
                # Store only the known numeric bar fields.
                fields = ['open', 'high', 'low', 'close', 'pre_close', 'change', 'pct_chg', 'vol', 'amount']
                for field in fields:
                    value = data.get(field)
                    if value is not None and isinstance(value, (int, float)):
                        point = point.field(field, float(value))
                
                points.append(point)
            
            if points:
                await self.write_api.write(
                    bucket=self.config.bucket,
                    org=self.config.org,
                    record=points
                )
                logger.info(f"Batch wrote {len(points)} index daily records for {ts_code}")
                return len(points)
            return 0
            
        except Exception as e:
            logger.error(f"Failed to batch write index daily data: {e}")
            return 0
    
    async def query_index_daily_data(self, ts_code: str, start_date: datetime, 
                                    end_date: datetime) -> Optional[pd.DataFrame]:
        """Query index daily bars for *ts_code* in [start_date, end_date].

        Returns:
            A DataFrame with fixed columns (ts_code, trade_date, and the
            OHLCV fields), sorted newest-first, or None when
            disconnected, when no data matched, or on error.
        """
        if not self.is_connected:
            logger.warning("InfluxDB not connected")
            return None
            
        try:
            query = f'''
            from(bucket: "{self.config.bucket}")
            |> range(start: {start_date.isoformat()}Z, stop: {end_date.isoformat()}Z)
            |> filter(fn: (r) => r._measurement == "index_daily")
            |> filter(fn: (r) => r.ts_code == "{ts_code}")
            |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
            |> sort(columns: ["_time"], desc: true)
            '''
            
            tables = await self.query_api.query(query, org=self.config.org)
            
            if not tables or not tables[0].records:
                logger.debug(f"No index data found for {ts_code}")
                return None
            
            # Project each record into a fixed-schema row dict; missing
            # fields come back as None.
            records = []
            for record in tables[0].records:
                record_dict = {
                    'ts_code': ts_code,
                    'trade_date': record.get_time(),
                    'open': record.values.get('open'),
                    'high': record.values.get('high'),
                    'low': record.values.get('low'),
                    'close': record.values.get('close'),
                    'pre_close': record.values.get('pre_close'),
                    'change': record.values.get('change'),
                    'pct_chg': record.values.get('pct_chg'),
                    'vol': record.values.get('vol'),
                    'amount': record.values.get('amount'),
                }
                records.append(record_dict)
            
            if records:
                df = pd.DataFrame(records)
                logger.debug(f"Queried {len(records)} index daily records for {ts_code}")
                return df
            return None
            
        except Exception as e:
            logger.error(f"Failed to query index daily data: {e}")
            return None

# Module-level singleton adapter instance
_influxdb_adapter: Optional[InfluxDBAsyncAdapter] = None

def get_influxdb_adapter() -> Optional[InfluxDBAsyncAdapter]:
    """Return the globally registered InfluxDB adapter, or None if unset."""
    return _influxdb_adapter

def set_influxdb_adapter(adapter: InfluxDBAsyncAdapter):
    """Register *adapter* as the module-level singleton instance."""
    global _influxdb_adapter
    _influxdb_adapter = adapter

async def init_influxdb_adapter(config: InfluxDBConfig) -> bool:
    """Create, connect, and globally register an InfluxDB adapter.

    Args:
        config: Connection settings for the new adapter.

    Returns:
        True when the connection succeeded and the adapter was
        registered via set_influxdb_adapter(); False otherwise.
    """
    adapter = InfluxDBAsyncAdapter(config)
    # connect() returns None and signals failure by raising
    # AdapterConnectionError, so success is the absence of an exception.
    # (The previous code checked connect()'s return value, which was
    # always None — the adapter was never registered and the function
    # never returned True.)
    try:
        await adapter.connect()
    except Exception as e:
        logger.error(f"Failed to initialize InfluxDB adapter: {e}")
        return False
    set_influxdb_adapter(adapter)
    return True
