import sys
import pandas as pd

sys.path.append("..")  # make the parent directory importable (for db_tools)

from db_tools import MysqlTool
import requests
from loguru import logger
import math
import warnings
from concurrent.futures import ThreadPoolExecutor, as_completed
import time
from threading import Lock

warnings.filterwarnings("ignore")

# Thread-safe renewal-count statistics shared by the worker threads
renewed_lock = Lock()
renewed_stats = {"success": 0, "error": 0, "total": 0, "processed": 0}


def 索引分组(start_len, end_len, size):
    """Split the index range [start_len, end_len) into groups of at most *size*.

    Args:
        start_len (int): inclusive start index.
        end_len (int): exclusive end index.
        size (int): maximum elements per group.

    Returns:
        list[list[int]]: [start, end) pairs; the last pair is clamped to
        ``end_len``, so the final group may be smaller than ``size``.
        An empty range yields an empty list (the original code raised
        IndexError on ``list_data[-1]`` in that case).
    """
    span = end_len - start_len
    total_groups = math.ceil(span / size)
    print("一共有%s组" % total_groups)
    groups = [
        [start_len + i * size, start_len + (i + 1) * size]
        for i in range(total_groups)
    ]
    if groups:
        # Clamp the last group so it never runs past end_len.
        groups[-1][1] = end_len
    return groups


def chunk(lst, size):
    """Split *lst* into consecutive slices of at most *size* elements each."""
    return [lst[start : start + size] for start in range(0, len(lst), size)]


def get_user_pay_count(user_id):
    """Return the number of successful VIP orders for *user_id* (serial version).

    Queries the order-log API for orders with buy_type=11 (VIP purchase)
    and status=2 (successful).

    Args:
        user_id (int): user id; 0 is treated as "no user" and returns -1
            without calling the API.

    Returns:
        int: the order count from the API, or -1 when user_id == 0.

    Raises:
        RuntimeError: if the request fails or the response is malformed.
    """
    # Only count successful VIP-purchase orders for this user.
    url = f"https://scapi.tayunapi.com/funcLog/userOrders?user_id={user_id}&buy_type=11&status=2"
    if user_id == 0:
        return -1
    try:
        # timeout keeps a hung connection from stalling the whole run,
        # matching get_user_pay_count_concurrent below.
        res = requests.get(url, timeout=10)
        count_ = res.json()["count"]
    except Exception as e:
        logger.error(f"{url}")
        # The original `raise "异常"` raised a TypeError (strings are not
        # exceptions); raise a real exception and keep the cause chained.
        raise RuntimeError(f"获取用户 {user_id} 订单数失败") from e
    logger.info(f"{user_id}: -> {count_}")
    return count_


def get_user_pay_count_concurrent(user_data):
    """Thread-safe variant of the renewal-count lookup, for the thread pool.

    Args:
        user_data (dict): must carry ``user_id`` and ``order_no``.

    Returns:
        dict: ``{"user_id", "order_no", "is_renewed", "success"}``; on any
        failure ``is_renewed`` is -2 and ``success`` is False.
    """
    user_id = user_data["user_id"]
    order_no = user_data["order_no"]
    base = {"user_id": user_id, "order_no": order_no}

    try:
        if user_id == 0:
            # "No user" sentinel: skip the API call entirely.
            result = {**base, "is_renewed": -1, "success": True}
        else:
            url = f"https://scapi.tayunapi.com/funcLog/userOrders?user_id={user_id}&buy_type=11&status=2"
            count = requests.get(url, timeout=10).json()["count"]  # timeout guards hung sockets
            result = {**base, "is_renewed": count, "success": True}

        # Update the shared counters under the lock; log every 10th record.
        with renewed_lock:
            renewed_stats["success"] += 1
            renewed_stats["processed"] += 1
            if renewed_stats["processed"] % 10 == 0:
                logger.info(f"📊 续费统计进度: {renewed_stats['processed']}/{renewed_stats['total']} "
                          f"(成功: {renewed_stats['success']}, 失败: {renewed_stats['error']})")

        return result

    except Exception as e:
        with renewed_lock:
            renewed_stats["error"] += 1
            renewed_stats["processed"] += 1

        logger.error(f"❌ 获取用户 {user_id} 续费信息失败: {e}")
        return {**base, "is_renewed": -2, "success": False}


class Renewed_count(MysqlTool):
    """Fill in the ``is_renewed`` column of the ``pay`` table.

    ``is_renewed`` sentinel values (from the original comments):
      * -1 -> not yet matched (pending computation)
      * -2 -> user deleted / lookup failed
      * >=0 -> number of successful VIP orders for the user
    """

    def __init__(self) -> None:
        super().__init__()

    def get_all_order_no(self):
        """Return a DataFrame of (user_id, order_no) rows still pending.

        Only successful payments (status = "成功") whose renewal count has
        not been computed yet (is_renewed = -1) are selected.
        """
        query = """
        SELECT user_id, order_no
        FROM pay
        WHERE is_renewed = -1 AND status = "成功"
        """
        return pd.read_sql(query, self.engine)

    def jisuan_renewed(self, df):
        """Compute is_renewed for every row of *df* (serial: one API call per row)."""
        df["is_renewed"] = df["user_id"].apply(get_user_pay_count)
        return df

    def update_one(self, data_info):
        """Persist one row's renewal count to the ``pay`` table.

        Args:
            data_info (dict): must carry ``is_renewed`` and ``order_no``.
        """
        # int() ensures only a numeric value is interpolated into the SQL.
        # NOTE(review): order_no is string-interpolated; it comes from our own
        # `pay` table, but parameterized queries would be safer if
        # execute_sql supports them — TODO confirm.
        sql = f"""UPDATE pay
        SET is_renewed = {int(data_info['is_renewed'])}
        WHERE order_no = '{data_info['order_no']}';"""
        return self.execute_sql(sql)

    def batch_update_renewed(self, results):
        """Write successful lookup results back to the database, one by one.

        Args:
            results (list[dict]): results from get_user_pay_count_concurrent.

        Returns:
            int: number of rows successfully updated (0 on unexpected error).
        """
        try:
            # int() guards against non-numeric values in the hand-built SQL.
            update_queries = [
                f"UPDATE pay SET is_renewed = {int(r['is_renewed'])} WHERE order_no = '{r['order_no']}'"
                for r in results
                if r["success"]
            ]

            success_count = 0
            for sql in update_queries:
                if self.execute_sql(sql):
                    success_count += 1

            logger.info(f"✅ 批量更新完成: {success_count}/{len(update_queries)} 条记录更新成功")
            return success_count

        except Exception as e:
            logger.error(f"❌ 批量更新失败: {e}")
            return 0

    def update_many_concurrent(self, max_workers=6):
        """Concurrent renewal-count update using a thread pool.

        Args:
            max_workers (int): maximum worker threads (6-8 recommended).

        Returns:
            tuple[int, int]: (success_count, error_count).
        """
        logger.info(f"🚀 开始并发续费统计 (并发数: {max_workers})")

        df = self.get_all_order_no()
        total_count = len(df)
        logger.info(f"📋 一共需要更新 {total_count} 条续费信息")

        if total_count == 0:
            logger.info("✅ 没有需要更新的续费信息")
            return 0, 0

        # Reset the module-level progress counters for this run.
        with renewed_lock:
            renewed_stats.update({"success": 0, "error": 0, "total": total_count, "processed": 0})

        start_time = time.time()

        # One {"user_id": ..., "order_no": ...} dict per row.
        user_data_list = df[["user_id", "order_no"]].to_dict("records")

        results = []
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            future_to_user = {
                executor.submit(get_user_pay_count_concurrent, user_data): user_data["user_id"]
                for user_data in user_data_list
            }

            logger.info(f"📦 已提交 {total_count} 个并发任务")

            for future in as_completed(future_to_user):
                user_id = future_to_user[future]
                try:
                    results.append(future.result())
                except Exception as exc:
                    # Worker raised outside its own try/except: count it here.
                    logger.error(f"💥 用户 {user_id} 续费统计异常: {exc}")
                    results.append({
                        "user_id": user_id,
                        "order_no": "unknown",
                        "is_renewed": -2,
                        "success": False
                    })
                    with renewed_lock:
                        renewed_stats["error"] += 1
                        renewed_stats["processed"] += 1

        logger.info("💾 开始批量更新数据库...")
        updated_count = self.batch_update_renewed(results)

        elapsed_time = time.time() - start_time

        with renewed_lock:
            success_count = renewed_stats["success"]
            error_count = renewed_stats["error"]
            success_rate = (success_count / total_count * 100) if total_count > 0 else 0

        logger.info(f"🎉 === 并发续费统计完成 ===")
        logger.info(f"📊 处理统计:")
        logger.info(f"   总数量: {total_count}")
        logger.info(f"   成功: {success_count} ({success_rate:.1f}%)")
        logger.info(f"   失败: {error_count}")
        logger.info(f"   数据库更新: {updated_count}")
        logger.info(f"   耗时: {elapsed_time:.1f}秒")
        logger.info(f"   平均: {elapsed_time/total_count:.2f}秒/条")

        return success_count, error_count

    def update_many(self):
        """Original serial version, kept for compatibility (one row at a time)."""
        logger.info("⚠️  使用串行续费统计模式，如需提升性能请使用 update_many_concurrent()")

        df = self.get_all_order_no()
        logger.info(f"一共需要更新:{len(df)}")
        dfs = chunk(df, 50)  # process in batches of 50 rows
        for df_new in dfs[:]:
            df_renewed = self.jisuan_renewed(df_new)

            for i in df_renewed.index:
                data_info = df_renewed.loc[i].to_dict()
                logger.info(data_info)
                self.update_one(data_info)


if __name__ == "__main__":
    # Entry point: run the serial updater against the database.
    worker = Renewed_count()
    worker.update_many()
