from sqlalchemy.orm import Session
from sqlalchemy import text, desc
from models import MerchantMetadata
from database import create_merchant_data_table
import json
from typing import Dict, Any, List, Optional, Tuple
from utils.logger import setup_logger
import re
from collections import defaultdict

# Unified logger for this module
logger = setup_logger('data_service', 'logs/data_service.log')

def get_or_create_merchant_metadata(db: Session, merchant_id: int, merchant_name: str, platform_type: str):
    """Fetch the metadata row for (merchant_id, platform_type), creating it if missing.

    If the same merchant already exists under another platform, the new
    record reuses that record's data table instead of creating a new one.

    Args:
        db: active SQLAlchemy session.
        merchant_id: merchant identifier (stored as a string in the DB).
        merchant_name: display name of the merchant.
        platform_type: platform the data originates from.

    Returns:
        The existing or newly created MerchantMetadata instance.

    Raises:
        Exception: re-raises any DB/table-creation error after logging it.
    """
    try:
        # The metadata table stores merchant_id as a string
        merchant_id_str = str(merchant_id)
        logger.info(f"开始处理商户元数据: merchant_id={merchant_id_str}, merchant_name={merchant_name}")
        logger.debug(f"商户元数据参数: platform_type={platform_type}")

        # Look for a record for this merchant on the current platform
        metadata = db.query(MerchantMetadata).filter(
            MerchantMetadata.merchant_id == merchant_id_str,
            MerchantMetadata.platform_type == platform_type
        ).first()

        if metadata:
            logger.info(f"找到已存在的商户元数据记录: id={metadata.id}")
            metadata_info = {
                'id': metadata.id,
                'merchant_id': metadata.merchant_id,
                'merchant_name': metadata.merchant_name,
                'platform_type': metadata.platform_type,
                'data_table_name': metadata.data_table_name
            }
            logger.debug(f"商户元数据详情: {json.dumps(metadata_info, ensure_ascii=False)}")
            return metadata

        logger.info("未找到当前平台的记录，检查是否存在其他平台的记录")
        # The same merchant may already exist under another platform;
        # if so, the new record must share that record's data table
        existing_metadata = db.query(MerchantMetadata).filter(
            MerchantMetadata.merchant_id == merchant_id_str
        ).first()

        # Build the new record once; only the table handling differs below
        # (the original code duplicated this construction in both branches)
        metadata = MerchantMetadata(
            merchant_id=merchant_id_str,
            merchant_name=merchant_name,
            platform_type=platform_type
        )

        if existing_metadata:
            logger.info(f"找到其他平台的记录，将使用其表名: {existing_metadata.data_table_name}")
            # Reuse the existing data table instead of creating a new one
            metadata.data_table_name = existing_metadata.data_table_name
        else:
            logger.info("创建新的商户元数据记录")
            # First record for this merchant: create its dedicated data table
            logger.info(f"开始创建商户数据表: {metadata.data_table_name}")
            create_merchant_data_table(metadata.data_table_name)
            logger.info("商户数据表创建完成")

        db.add(metadata)
        db.commit()
        db.refresh(metadata)
        return metadata
    except Exception as e:
        logger.error(f"处理商户元数据时出错: {str(e)}", exc_info=True)
        raise

def insert_merchant_data(db: Session, table_name: str, data: dict):
    """Upsert one metric row into a merchant data table.

    A row is keyed by (topic, group_name, metric_name, metric_subitem,
    data_period): an existing row has its value/remark refreshed, otherwise
    a new row is inserted. Commits on success, rolls back on any error.

    NOTE: table_name is interpolated into the SQL text directly (identifiers
    cannot be bound parameters); it must always come from trusted metadata,
    never from user input.

    Raises:
        Exception: wraps the underlying DB error, chained via ``from``.
    """
    try:
        logger.debug(f"准备插入数据到表 {table_name}: {json.dumps(data, ensure_ascii=False)}")

        # Check whether a row with this logical key already exists
        check_sql = f"""
        SELECT id FROM {table_name} 
        WHERE topic = :topic 
        AND group_name = :group_name 
        AND metric_name = :metric_name 
        AND metric_subitem = :metric_subitem
        AND data_period = :data_period
        """
        logger.debug(f"执行查询SQL: {check_sql}")
        logger.debug(f"查询参数: {json.dumps(data, ensure_ascii=False)}")

        result = db.execute(text(check_sql), data).first()

        if result:
            logger.info(f"记录已存在，准备更新: id={result[0]}")
            # Row exists: refresh its value, remark and modification time
            update_sql = f"""
            UPDATE {table_name}
            SET metric_value = :metric_value,
                data_period = :data_period,
                remark = :remark,
                modified_at = CURRENT_TIMESTAMP
            WHERE id = :id
            """
            data['id'] = result[0]
            logger.debug(f"执行更新SQL: {update_sql}")
            logger.debug(f"更新参数: {json.dumps(data, ensure_ascii=False)}")
            db.execute(text(update_sql), data)
            logger.info("数据更新成功")
        else:
            logger.info("记录不存在，准备插入新记录")
            # No match: insert a brand-new row
            insert_sql = f"""
            INSERT INTO {table_name} 
            (topic, group_name, metric_name, metric_subitem, metric_value, data_period, remark)
            VALUES 
            (:topic, :group_name, :metric_name, :metric_subitem, :metric_value, :data_period, :remark)
            """
            logger.debug(f"执行插入SQL: {insert_sql}")
            logger.debug(f"插入参数: {json.dumps(data, ensure_ascii=False)}")
            db.execute(text(insert_sql), data)
            logger.info("数据插入成功")

        db.commit()
    except Exception as e:
        logger.error(f"插入数据失败: {str(e)}", exc_info=True)
        db.rollback()
        # Chain the cause so the original DB error survives in tracebacks
        raise Exception(f"插入数据失败: {str(e)}") from e

def _build_metric_row(group_title: str, metric_name: str, subitem: str, value, data_period: str) -> dict:
    """Build one row dict for insert_merchant_data.

    List values cannot be stored in metric_value directly: the JSON dump
    goes into remark and metric_value carries the '<list>' marker instead.
    """
    remark = None
    if isinstance(value, list):
        remark = json.dumps(value, ensure_ascii=False)
        value = "<list>"
    return {
        'topic': '默认',
        'group_name': group_title,
        'metric_name': metric_name,
        'metric_subitem': subitem,
        'metric_value': value,
        'data_period': data_period,
        'remark': remark
    }

def process_metric_data(db: Session, table_name: str, group_title: str, metric_data: dict, data_period: str):
    """Persist one metric card: its main 'value' plus every other sub-item.

    Sub-items named 'type' or holding the '--' placeholder are skipped.
    The 'label' key names the metric and is never stored as a sub-item.

    Raises:
        Exception: re-raises any storage error after logging it.
    """
    try:
        logger.debug(f"开始处理指标数据: group_title={group_title}, metric_data={json.dumps(metric_data, ensure_ascii=False)}")

        # Store the main value under the 'value' sub-item
        if 'value' in metric_data:
            row = _build_metric_row(group_title, metric_data['label'], 'value',
                                    metric_data['value'], data_period)
            insert_merchant_data(db, table_name, row)

        # Remaining keys become additional sub-items
        for key, value in metric_data.items():
            # 'type' is presentation metadata, never stored
            if key == 'type':
                continue

            # Skip the metric name, the already-stored value, and placeholders
            if key not in ['label', 'value'] and value != '--':
                row = _build_metric_row(group_title, metric_data['label'], key,
                                        value, data_period)
                insert_merchant_data(db, table_name, row)

        logger.info("指标数据处理完成")
    except Exception as e:
        logger.error(f"处理指标数据失败: {str(e)}", exc_info=True)
        raise

def process_upload_data(db: Session, merchant_id: int, merchant_name: str, platform_type: str, data_period: str, json_data: str):
    """Parse an uploaded JSON payload and persist every metric it contains.

    json_data is a JSON array of groups, each carrying a 'groupTitle' and a
    'cards' list of metric dicts; every card is stored via process_metric_data
    into the merchant's data table.

    Returns:
        True on success.

    Raises:
        Exception: wraps any parse/storage error, chained via ``from``.
    """
    try:
        logger.info(f"开始处理上传数据: merchant_id={merchant_id}, data_period={data_period}")
        logger.debug(f"请求参数: merchant_name={merchant_name}, platform_type={platform_type}")
        logger.debug(f"原始JSON数据: {json_data}")

        # Parse the payload
        data_list = json.loads(json_data)
        logger.info(f"JSON数据解析成功，包含 {len(data_list)} 个分组")
        logger.debug(f"解析后的数据结构: {json.dumps(data_list, ensure_ascii=False)}")

        # Resolve (or create) the merchant's metadata and data table
        metadata = get_or_create_merchant_metadata(db, merchant_id, merchant_name, platform_type)

        # Store every metric card, group by group (empty groups are skipped)
        for group in data_list:
            if 'cards' in group and group['cards']:
                logger.info(f"处理分组: {group['groupTitle']}, 包含 {len(group['cards'])} 个指标")
                logger.debug(f"分组详情: {json.dumps(group, ensure_ascii=False)}")
                for metric in group['cards']:
                    process_metric_data(db, metadata.data_table_name, group['groupTitle'], metric, data_period)

        logger.info("数据上传处理完成")
        return True
    except Exception as e:
        logger.error(f"处理数据失败: {str(e)}", exc_info=True)
        # Chain the cause so the original error stays in the traceback
        raise Exception(f"处理数据失败: {str(e)}") from e

def get_merchant_list(db: Session, filters: Dict[str, Any], page_num: int, page_size: int) -> Dict[str, Any]:
    """
    Return a deduplicated, paginated merchant list.

    Args:
        db: database session
        filters: filter conditions ('name' fuzzy match, 'platform_type' exact)
        page_num: 1-based page number
        page_size: records per page

    Returns:
        {"total", "pageNum", "pageSize", "list"} where "list" holds
        camelCase merchant dicts and "total" is the post-dedup count.
    """
    # Base query
    query = db.query(MerchantMetadata)

    # Apply filter conditions
    for key, value in filters.items():
        if key == "name" and value:
            # Fuzzy name match
            query = query.filter(MerchantMetadata.merchant_name.like(f"%{value}%"))
        elif key == "platform_type" and value:
            # Exact platform-type match
            query = query.filter(MerchantMetadata.platform_type == value)

    # Fetch everything; dedup and paging happen in Python below
    merchants = query.all()

    # Deduplicate by data_table_name, preferring the "抖音生意经" platform
    # when several platform records share the same data table
    unique_merchants = {}
    for merchant in merchants:
        table_name = merchant.data_table_name
        if table_name not in unique_merchants:
            unique_merchants[table_name] = merchant
        elif merchant.platform_type == "抖音生意经":
            unique_merchants[table_name] = merchant

    # Sort numerically: merchant_id is stored as a string, so a plain string
    # sort would order "9" after "10" (bug fix; the response already assumes
    # numeric ids via int() below)
    merchant_list = list(unique_merchants.values())
    merchant_list.sort(key=lambda x: int(x.merchant_id))

    # Total record count (after dedup)
    total = len(merchant_list)

    # Manual pagination over the deduplicated list
    start_idx = (page_num - 1) * page_size
    end_idx = start_idx + page_size
    paged_merchants = merchant_list[start_idx:end_idx]

    # Shape the response payload
    result_list = []
    for merchant in paged_merchants:
        result_list.append({
            "merchantId": int(merchant.merchant_id),
            "name": merchant.merchant_name,
            "code": merchant.merchant_id,
            "platformType": merchant.platform_type,
            "dataTableName": merchant.data_table_name,
            "createTime": merchant.created_at.strftime("%Y-%m-%d %H:%M:%S")
        })

    return {
        "total": total,
        "pageNum": page_num,
        "pageSize": page_size,
        "list": result_list
    }

def process_value(value: Any) -> Any:
    """
    Normalize a raw metric value.

    String values get any leading fullwidth (￥) and then halfwidth (¥)
    yuan signs stripped; non-string values pass through unchanged.

    Args:
        value: raw value as read from the source data

    Returns:
        The cleaned value.
    """
    # Only strings need cleaning
    if not isinstance(value, str):
        return value

    # Strip currency prefixes: fullwidth sign first, then halfwidth
    for yuan_sign in ('￥', '¥'):
        value = value.lstrip(yuan_sign)

    return value

def parse_path_and_value(path: str, raw_value: Any) -> Tuple[Optional[int], str, Any]:
    """
    Decompose a target_path expression into its routing parts.

    Returns a (index, field, fixed_value) triple:
      * index       -- array position for 'value[N].field' paths, else None
      * field       -- the field name ('value' for the whole-value path)
      * fixed_value -- static default following '=', or raw_value when the
                       path is exactly 'value', else None

    Raises:
        ValueError: for any unsupported path format.
    """
    # Bare 'value' means: assign raw_value wholesale, no field routing
    if path == "value":
        return None, "value", raw_value

    # An '=' suffix carries a static default, e.g. 'value.x=foo'
    fixed_value = None
    if '=' in path:
        path, default_part = path.split('=', 1)
        fixed_value = default_part.strip()

    # Array form: value[N].field
    array_form = re.match(r'value\[(\d+)]\.(.+)', path)
    if array_form:
        return int(array_form.group(1)), array_form.group(2), fixed_value

    # Object form: value.field
    object_form = re.match(r'value\.(.+)', path)
    if object_form:
        return None, object_form.group(1), fixed_value

    raise ValueError(f"不支持的路径格式: {path}")

def build_structured_data(latest_data, metric_mapping) -> List[Dict[str, Any]]:
    """
    Build the structured output payload from raw metric rows.

    Supports several target-path rules: whole-value assignment ("value"),
    object fields ("value.field"), array elements ("value[N].field") and
    static defaults ("...=literal"); see parse_path_and_value for parsing.

    Args:
        latest_data: iterable of rows exposing topic, group_name,
            metric_name, metric_subitem, metric_value and remark attributes.
        metric_mapping: maps (topic, group, metric, subitem) source keys to
            (target_group, target_metric, target_path) tuples.

    Returns:
        A list of {"title": group, "items": [{"name": ..., "value": ...}]}.

    Raises:
        ValueError: if one target metric mixes object and array structures.
    """
    grouped = defaultdict(lambda: defaultdict(lambda: None))  # group_title -> metric_name -> value

    # Pass 1: seed static defaults first, so the real-data pass below can
    # overwrite them field by field
    for source_key, (target_group, target_metric, target_path) in metric_mapping.items():
        if source_key[3].startswith("<默认>"):  # source_metric_subitem flags a default-only rule
            # Parse the path plus any default value embedded after '='
            index, field_name, override_value = parse_path_and_value(target_path, None)
            
            # Fall back to an empty string when the path carries no default
            if override_value is None:
                override_value = ""
            
            # Route by index type
            if index is None:
                # Object structure
                if grouped[target_group][target_metric] is None:
                    grouped[target_group][target_metric] = {}
                grouped[target_group][target_metric][field_name] = override_value
            else:
                # Array structure: grow the list with empty dicts up to index
                if grouped[target_group][target_metric] is None:
                    grouped[target_group][target_metric] = []
                value_array = grouped[target_group][target_metric]
                while len(value_array) <= index:
                    value_array.append({})
                value_array[index][field_name] = override_value

    # Pass 2: fold in the actual metric rows
    for data in latest_data:
        source_key = (data.topic, data.group_name, data.metric_name, data.metric_subitem)
        if source_key not in metric_mapping:
            continue
        
        target_group, target_metric, target_path = metric_mapping[source_key]

        # Resolve metric_value; the "<list>" marker means the real payload
        # lives in remark as JSON. Non-JSON values are kept verbatim.
        raw_value = data.remark if data.metric_value == "<list>" else data.metric_value
        try:
            parsed_value = json.loads(raw_value)
        except Exception:
            parsed_value = raw_value

        # Whole-value assignment: store as-is, no field routing
        if target_path == "value":
            grouped[target_group][target_metric] = parsed_value
            continue

        # Otherwise parse the path: it may carry a default, or be value[N].field
        index, field_name, override_value = parse_path_and_value(target_path, parsed_value)
        final_value = override_value if override_value is not None else parsed_value

        # === Object structure ===
        if index is None:
            if grouped[target_group][target_metric] is None or isinstance(grouped[target_group][target_metric], dict):
                if grouped[target_group][target_metric] is None:
                    grouped[target_group][target_metric] = {}
                grouped[target_group][target_metric][field_name] = final_value
            else:
                raise ValueError(f"字段 '{target_metric}' 结构混用：对象和数组冲突")

        # === Array structure ===
        else:
            if grouped[target_group][target_metric] is None:
                grouped[target_group][target_metric] = []
            elif not isinstance(grouped[target_group][target_metric], list):
                raise ValueError(f"字段 '{target_metric}' 结构混用：数组和对象冲突")

            value_array = grouped[target_group][target_metric]
            while len(value_array) <= index:
                value_array.append({})
            value_array[index][field_name] = final_value

    # Assemble the final [{title, items}] payload
    result = []
    for group_title, metric_dict in grouped.items():
        items = []
        for metric_name, value in metric_dict.items():
            items.append({
                "name": metric_name,
                "value": value
            })
        result.append({
            "title": group_title,
            "items": items
        })

    return result

def get_merchant_data(db: Session, merchant_id: int, template_id: Optional[int] = None) -> Dict[str, Any]:
    """
    Return the latest-period structured data for one merchant.

    Args:
        db: database session
        merchant_id: merchant identifier; the magic id 123 returns canned
            sample data from '数据样例.json' for debugging
        template_id: reserved for template selection; currently unused

    Returns:
        The structured payload built by build_structured_data (or the raw
        sample JSON for the debug merchant).

    Raises:
        Exception: when the merchant is unknown, the sample file is
            unreadable, or any DB step fails; the cause is chained.
    """
    # Debug switch: merchant 123 always gets the canned sample payload
    USE_SAMPLE_DATA = merchant_id == 123

    if USE_SAMPLE_DATA:
        try:
            with open('数据样例.json', 'r', encoding='utf-8') as f:
                return json.load(f)
        except Exception as e:
            logger.error(f"读取样例数据失败: {str(e)}", exc_info=True)
            # Chain the cause so the original I/O error isn't lost
            raise Exception(f"读取样例数据失败: {str(e)}") from e

    try:
        # merchant_id is stored as a string in the metadata table
        merchant_id_str = str(merchant_id)

        # Resolve the merchant's per-merchant data table
        metadata = db.query(MerchantMetadata).filter(
            MerchantMetadata.merchant_id == merchant_id_str
        ).first()

        if not metadata:
            raise Exception(f"未找到商户ID为{merchant_id}的元数据")

        # Fetch all rows of the most recent data period. data_period is either
        # 'type|start|end' (sorted by the end date) or a plain date string.
        # The table name comes from trusted metadata, so the f-string
        # interpolation is safe here (identifiers cannot be bound params).
        latest_data_sql = f"""
        SELECT topic, group_name, metric_name, metric_subitem, metric_value, data_period, remark
        FROM {metadata.data_table_name}
        WHERE data_period = (
            SELECT data_period
            FROM {metadata.data_table_name}
            ORDER BY 
                CASE 
                    WHEN data_period LIKE '%|%|%' THEN 
                        STR_TO_DATE(SUBSTRING_INDEX(data_period, '|', -1), '%Y-%m-%d')
                    ELSE 
                        STR_TO_DATE(data_period, '%Y-%m-%d')
                END DESC
            LIMIT 1
        )
        """

        latest_data = db.execute(text(latest_data_sql)).fetchall()

        # Load the source -> target mapping rules
        mapping_sql = """
        SELECT source_topic, source_group_name, source_metric_name, source_metric_subitem,
               target_group_title, target_metric_name, target_metric_path
        FROM data_mapping
        """
        mappings = db.execute(text(mapping_sql)).fetchall()

        # Key: (topic, group, metric, subitem) -> (target group, metric, path)
        metric_mapping = {}
        for mapping in mappings:
            source_key = (
                mapping.source_topic,
                mapping.source_group_name,
                mapping.source_metric_name,
                mapping.source_metric_subitem
            )
            target_key = (
                mapping.target_group_title,
                mapping.target_metric_name,
                mapping.target_metric_path
            )
            metric_mapping[source_key] = target_key

        # Fold rows + mapping into the structured response
        result = build_structured_data(latest_data, metric_mapping)
        logger.info(f"获取商户数据成功: {result}")
        return result
    except Exception as e:
        logger.error(f"获取商户数据失败: {str(e)}", exc_info=True)
        # Chain the cause so the underlying error stays in the traceback
        raise Exception(f"获取商户数据失败: {str(e)}") from e