# your_app/tasks.py
import traceback

from celery_main import app as celery_app
from common.models.base_data import ArrearsInformationRecord, GroupCustomerInfo, OperationLogCelery
from common.models.system import Dictionary
# from common.scheduler import logger
import logging
from celery import shared_task, Celery
import csv
import io
from openpyxl import load_workbook
from django.db import transaction, connection, IntegrityError
import pandas as pd

# 初始化 logger
logger = logging.getLogger(__name__)


# Upload entry point
@celery_app.task(name='async_import_task')
def async_import_task(user_id, clear_before_import, file_data, file_extension):
    """Async core task for importing arrears data.

    Args:
        user_id: primary key of the importing user (resolved via get_user_model).
        clear_before_import: truthy -> wipe existing arrears records first.
        file_data: raw bytes of the uploaded file.
        file_extension: '.csv' selects the CSV parser, anything else Excel.

    Returns:
        dict with status 'SUCCESS' (cleared/imported/errors) or
        'FAILURE' (message/traceback). Never raises to the broker.
    """
    try:
        cleared_count = 0

        # The task only receives the user id, so resolve the user object here.
        from django.contrib.auth import get_user_model
        user = get_user_model().objects.get(id=user_id)

        # 1. Optionally clear existing data in safe batches.
        if clear_before_import:
            cleared_count = _safe_clear_arrears(user)

        # 2. Parse the uploaded file into a list of row dicts.
        if file_extension == '.csv':
            data = _parse_csv(file_data)
        else:
            data = _parse_excel(file_data)
        logger.info("导入文件解析完成，共 %d 行", len(data))

        # 3. Validate and normalize every row.
        valid_records, error_details = _validate_records(data, user)
        valid_count = len(valid_records)
        # BUG FIX: error_count was previously hard-coded to 0, so the
        # operation log always claimed zero failures. Derive it from the
        # validation result instead.
        error_count = len(error_details)
        logger.info("数据校验完成：有效 %d 条，错误 %d 条", valid_count, error_count)

        # 4. Bulk insert the validated rows.
        if valid_records:
            _bulk_create_records(valid_records)

        # Audit trail for the whole run.
        _create_operation_log(user, cleared_count, valid_count, error_count)

        return {
            'cleared': cleared_count,
            'imported': valid_count,
            'errors': error_details,
            'status': 'SUCCESS'
        }

    except Exception as e:
        # Was logger.debug — failures must be visible; logger.exception
        # also records the traceback in the log.
        logger.exception("异步导入任务失败: %s", e)
        return {
            'status': 'FAILURE',
            'message': str(e),
            'traceback': traceback.format_exc()
        }


# -------------------- 辅助函数 --------------------
def _safe_clear_arrears(user):
    """Clear all arrears records in batches to avoid long table locks.

    BUG FIX: Django forbids .delete() on a sliced queryset
    ("Cannot use 'limit' or 'offset' with delete()"), so the previous
    ``objects.all()[:chunk_size].delete()`` raised at runtime. We now
    collect a batch of primary keys first and delete via ``pk__in``.

    Args:
        user: the acting user (kept for interface compatibility; unused here).

    Returns:
        Total number of rows deleted.
    """
    cleared_count = 0
    chunk_size = 5000
    while True:
        # Fetch one batch of primary keys; an empty batch means we are done.
        pks = list(
            ArrearsInformationRecord.objects.values_list('pk', flat=True)[:chunk_size]
        )
        if not pks:
            break
        deleted, _ = ArrearsInformationRecord.objects.filter(pk__in=pks).delete()
        cleared_count += deleted
    return cleared_count


def _parse_csv(file_data):
    """解析CSV文件（含BOM头处理）"""
    decoded_file = file_data.decode('utf-8-sig')
    reader = csv.DictReader(io.StringIO(decoded_file))
    return [row for row in reader]


def _parse_excel(file_data):
    """Parse Excel bytes into a list of row dicts.

    Reads the first sheet; the header row becomes the dict keys.
    (The previous hand-rolled openpyxl parser was left commented out here;
    pandas handles header and version quirks, so the dead code is removed.)

    Args:
        file_data: raw bytes of an .xlsx/.xls file.

    Returns:
        list[dict]: one dict per data row, keyed by column header.
    """
    df = pd.read_excel(io.BytesIO(file_data))
    return df.to_dict('records')


def _validate_records(data, user):
    """Validate each parsed row and split them into accepted and rejected sets.

    Args:
        data: list of row dicts produced by the CSV/Excel parsers.
        user: the importing user; its id is stamped onto every valid record.

    Returns:
        (valid_records, errors): valid_records are dicts ready for bulk
        insert; errors carry the 1-based row number, the failure message,
        and the raw row so the caller can report them without aborting.
    """
    valid_records, errors = [], []
    # Preload lookup caches so validation performs no per-row queries.
    group_cache = {g.group_name: g.id for g in GroupCustomerInfo.objects.all()}
    biz_dict_cache = _load_business_category_cache()

    amount_fields = ['全量欠费（元）', '存量欠费（元）', '新增欠费（元）', '当前坏账（元）']

    for idx, row in enumerate(data, 1):
        try:
            # The group must exist by exact (stripped) name.
            group_name = row['集团名称'].strip()
            if not group_name:
                raise ValueError('集团名称不能为空')
            group_id = group_cache.get(group_name)
            if not group_id:
                raise ValueError(f'集团"{group_name}"不存在')

            # Billing-period sanity check.
            min_period = row['最小账期']
            max_period = row['最大账期']
            if min_period > max_period:
                raise ValueError('最小账期不能大于最大账期')

            arrears_period_count = row['欠费账期数']

            # Every monetary field must parse and be non-negative.
            amounts = {f: _parse_amount(row.get(f, 0)) for f in amount_fields}
            if any(v < 0 for v in amounts.values()):
                raise ValueError('金额不能为负数')

            # The business category must match a dictionary entry.
            input_biz = row['业务大类'].strip()
            matched = _match_business_category(input_biz, biz_dict_cache)
            if not matched:
                raise ValueError(f'业务大类"{input_biz}"不存在')

            valid_records.append({
                'group_id': group_id,
                'min_billing_period': min_period,
                'max_billing_period': max_period,
                'arrears_period_count': arrears_period_count,
                'total_arrears': amounts['全量欠费（元）'],
                'stock_arrears': amounts['存量欠费（元）'],
                'new_arrears': amounts['新增欠费（元）'],
                'current_bad_debt': amounts['当前坏账（元）'],
                'creator_id': user.id,
                'business_category_id': matched.id,
                'business_category_label': matched.label,
                'business_category_value': matched.value
            })

        except Exception as e:
            errors.append({
                'row': idx,
                'message': str(e),
                'raw_data': row
            })
    return valid_records, errors


def _match_business_category(input_biz, biz_dict_cache):
    """匹配业务大类字典"""
    for key in [input_biz.lower(), input_biz.upper(), input_biz]:
        if key in biz_dict_cache:
            return biz_dict_cache[key]
    return None


def _load_business_category_cache():
    """Build a lookup of business-category dictionary value-nodes.

    Each child node is registered under three keys — lowercased label,
    lowercased value, and numeric id — all mapping to the node itself.

    Raises:
        RuntimeError: if the parent '业务大类' node does not exist.
    """
    parent = Dictionary.objects.filter(label='业务大类', is_value=False, status=True).first()
    if parent is None:
        raise RuntimeError('请先创建父字典：业务大类（非value节点）')

    children = Dictionary.objects.filter(parent=parent, is_value=True, status=True).select_related()
    cache = {}
    for node in children:
        cache[node.label.lower()] = node
        cache[node.value.lower()] = node
        cache[node.id] = node
    return cache


def _parse_amount(value):
    """解析金额（支持货币符号、千分位）"""
    if isinstance(value, str):
        value = value.replace('￥', '').replace('$', '').replace(',', '').strip()
    try:
        return round(float(value), 2) if value else 0.0
    except (TypeError, ValueError):
        raise ValueError(f'无效金额：{value}，请使用数字或可转换为数字的字符串')


# def _bulk_create_records(records):
#     """高性能批量插入（比 Django bulk_create 快 5-10 倍）"""
#     if not records:
#         return
#
#     fields = [
#         'group_id', 'min_billing_period', 'max_billing_period',
#         'total_arrears', 'stock_arrears', 'new_arrears',
#         'current_bad_debt', 'creator_id',
#         'business_category_id', 'business_category_label', 'business_category_value'
#     ]
#
#     # 数据预处理（核心修复点）
#     processed_params = []
#     for record in records:
#         # 1. 外键必须使用ID（修复原参数可能传递对象的问题）
#         if not isinstance(record.get('business_category_id'), int):
#             raise ValueError("business_category必须传递字典节点ID")
#
#         # 2. 强制填充冗余字段（避免信号未触发导致的数据不一致）
#         try:
#             dict_node = Dictionary.objects.get(pk=record['business_category_id'])
#             record['business_category_label'] = dict_node.label
#             record['business_category_value'] = dict_node.value
#         except Dictionary.DoesNotExist:
#             raise IntegrityError(f"字典节点ID {record['business_category_id']} 不存在")
#
#         # 3. 转换Decimal字段（避免字符串直接插入）
#         for field in ['total_arrears', 'stock_arrears', 'new_arrears', 'current_bad_debt']:
#             record[field] = round(float(record[field]), 2)  # 或保持Decimal类型
#
#         processed_params.append(tuple(record[field] for field in fields))
#
#         print(f"校验数据完成{len(processed_params)}")
#
#     # 安全批量插入（核心优化点）
#     batch_size = 10000
#     with connection.cursor() as cursor:
#         for i in range(0, len(processed_params), batch_size):
#             batch = processed_params[i:i + batch_size]
#             try:
#                 # 使用executemany替代手动拼接（数据库原生支持）
#                 cursor.executemany(
#                     """
#                     INSERT INTO arrears_information_record
#                     ({fields})
#                     VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
#                     """.format(fields=','.join(fields)),
#                     batch
#                 )
#             except Exception as e:
#                 connection.rollback()
#                 raise RuntimeError(f"批次 {i // batch_size + 1} 插入失败: {str(e)}")
#             else:
#                 connection.commit()
#                 print(f"批次 {i // batch_size + 1} 插入失成功")
#     return len(processed_params)
def _transform_data(records):
    """将校验后的数据转换为适合批量插入的格式"""
    processed_params = []
    for record in records:
        # 优化数据处理，减少不必要的操作
        params = [
            record['group_id'],
            record['min_billing_period'],
            record['max_billing_period'],
            record['arrears_period_count'],
            record['total_arrears'],
            record['stock_arrears'],
            record['new_arrears'],
            record['current_bad_debt'],
            record['creator_id'],
            record['business_category_id'],
            record['business_category_label'],
            record['business_category_value']
        ]
        processed_params.append(params)
    return processed_params


def _bulk_create_records(records):
    """Bulk-insert validated records with raw SQL, in batches.

    Uses transaction.atomic() instead of the previous manual autocommit
    toggling: the stray connection.commit() issued before
    set_autocommit(False) was redundant and raises
    TransactionManagementError when the task runs inside an outer atomic
    block. atomic() also guarantees all-or-nothing semantics across batches.

    Args:
        records: validated record dicts from _validate_records.

    Returns:
        Number of rows inserted (0 for empty input).

    Raises:
        RuntimeError: wrapping the database error of the failing batch;
        the whole transaction is rolled back.
    """
    if not records:
        return 0

    processed_params = _transform_data(records)
    fields = [
        'group_id', 'min_billing_period', 'max_billing_period', 'arrears_period_count', 'total_arrears',
        'stock_arrears', 'new_arrears',
        'current_bad_debt', 'creator_id', 'business_category_id',
        'business_category_label', 'business_category_value'
    ]
    batch_size = 10000
    table_name = 'arrears_information_record'  # physical table name
    # fields/table_name are constants, so f-string interpolation is safe here;
    # row values still go through parameterized placeholders.
    placeholders = ', '.join(['%s'] * len(fields))
    sql = f"INSERT INTO {table_name} ({', '.join(fields)}) VALUES ({placeholders})"
    logger.info("准备批量插入 %d 条记录", len(processed_params))

    with transaction.atomic():
        with connection.cursor() as cursor:
            for i in range(0, len(processed_params), batch_size):
                batch = processed_params[i:i + batch_size]
                try:
                    cursor.executemany(sql, batch)
                    logger.info("批次 %d 插入成功", i // batch_size + 1)
                except Exception as e:
                    # atomic() rolls back every batch on the way out.
                    raise RuntimeError(f"批次 {i // batch_size + 1} 插入失败: {str(e)}")

    return len(processed_params)


def _safe_int(value, default=0):
    """安全转换为整数"""
    try:
        return int(value) if value is not None else default
    except (ValueError, TypeError):
        logger.warning(f"无效整数转换：{value} → 使用默认值 {default}")
        return default


def _create_operation_log(user, cleared_count, imported_count, error_count):
    """Persist an audit log entry summarizing one import run."""
    detail = (
        f'导入欠费数据：清空{cleared_count}条，'
        f'成功导入{imported_count}条，失败{error_count}条'
    )
    OperationLogCelery.objects.create(
        user=user,
        action='IMPORT_ARREARS',
        detail=detail,
    )


class GroupCache:
    """Process-wide lazy singleton caching lowercased group names -> ids."""

    _instance = None   # singleton instance, created on first get_instance()
    _cache = {}        # {group_name.lower(): group_id}

    @classmethod
    def get_instance(cls):
        """Return the singleton, building and populating it on first use."""
        if cls._instance is None:
            cls._instance = cls()
            cls._load_cache()
        return cls._instance

    @classmethod
    def _load_cache(cls):
        """Rebuild the cache from all non-deleted groups."""
        cls._cache = {
            g.group_name.lower(): g.id
            for g in GroupCustomerInfo.objects.filter(is_del=False)
        }


class BizCategoryCache:
    """Process-wide lazy singleton caching lowercased category labels -> ids."""

    _instance = None   # singleton instance, created on first get_instance()
    _cache = {}        # {label.lower(): dictionary_node_id}

    @classmethod
    def get_instance(cls):
        """Return the singleton, building and populating it on first use."""
        if cls._instance is None:
            cls._instance = cls()
            cls._load_cache()
        return cls._instance

    @classmethod
    def _load_cache(cls):
        """Rebuild the cache from the children of the '业务大类' parent node.

        Note: Dictionary.objects.get raises DoesNotExist if the parent
        node is missing (same behavior as the original implementation).
        """
        parent = Dictionary.objects.get(label='业务大类', is_value=False)
        cls._cache = {
            node.label.lower(): node.id
            for node in Dictionary.objects.filter(parent=parent, is_value=True, status=True)
        }
