from datetime import datetime, timedelta
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from functools import wraps
import glob
import json
import os
import aiosmtplib
import httpx
from . import op
import asyncio
import mysql.connector
from dbutils.pooled_db import PooledDB
from .log import logger


def space_conversion(value):
    """Return str(value) with non-breaking spaces (U+00A0) turned into plain spaces."""
    text = str(value)
    return text.translate(str.maketrans({"\xa0": " "}))


def format_time_fields(time_columns):
    """
    Decorator: post-process a query result, normalizing each row dict.

    For every row in the result list:
      * ``task_params`` is split on "," into a list;
      * ``id`` is replaced by a 1-based running number offset by the result's
        ``start`` value (pagination offset);
      * columns named in ``time_columns`` are normalized to
        "YYYY-MM-DD HH:MM:SS" (ISO "T" separator replaced; ``datetime``
        values formatted).

    Accepts either a plain list of row dicts, or a dict with a truthy
    ``list`` entry (paged result) and an optional ``start`` offset; the
    ``start`` key is stripped from dict results before returning.

    Bug fixed vs. the original: a plain-list return value used to crash on
    ``result.get(...)`` / ``result.pop(...)`` even though the code's
    flag/else logic clearly intended to support it.

    :param time_columns: iterable of column names holding time values.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            result = func(*args, **kwargs)
            # Paged results wrap the rows in a (truthy) "list" entry.
            paged = isinstance(result, dict) and bool(result.get("list"))
            rows = result["list"] if paged else result
            start = result.get("start", 0) if isinstance(result, dict) else 0
            if isinstance(rows, list):
                for index, row in enumerate(rows):
                    for col, value in row.items():
                        if col == "task_params":
                            row[col] = row[col].split(",")
                        if col == "id":
                            # Re-number rows continuously across pages.
                            row[col] = start + index + 1
                        if col in time_columns:
                            if isinstance(value, str) and 'T' in value:
                                row[col] = value.replace('T', ' ')
                            elif isinstance(value, datetime):
                                row[col] = value.strftime('%Y-%m-%d %H:%M:%S')
                if paged:
                    result["list"] = rows
                else:
                    result = rows
            if isinstance(result, dict):
                # The pagination offset is internal — don't leak it to callers.
                result.pop("start", None)
            return result
        return wrapper
    return decorator


# Shared async HTTP client; follows redirects so the ERP login-page redirect
# can be detected later via `res.url`.
client = httpx.AsyncClient(follow_redirects=True)
# Cap on concurrent outbound requests (same value, 33, as the DB pool's
# maxconnections below — presumably aligned on purpose).
semaphore = asyncio.Semaphore(33)


async def fetch_html(url, headers, data):
    """POST `data` to `url` through the shared client, bounded by the module semaphore.

    :param url: target URL.
    :param headers: HTTP headers (typically carrying the session cookie).
    :param data: form payload.
    :return: the ``httpx.Response`` (15-second timeout).
    """
    async with semaphore:
        return await client.post(url, headers=headers, data=data, timeout=15)


async def get_rc_html(url, data):
    """POST `data` to an ERP `url` using the cached session cookie, re-logging
    in when the cookie looks stale.

    The login is refreshed in two cases:
      1. the stored ``last_login`` date is not today (pre-emptive refresh);
      2. the response is the ERP login page (cookie rejected) — the request
         is then retried exactly once with a fresh cookie.

    :param url: ERP endpoint to POST to.
    :param data: form payload passed through to ``fetch_html``.
    :return: the final ``httpx.Response``.
    """
    config = op.load_config()
    cookie = config['user']['cookie']
    logger.info(f"first--{cookie}")
    logger.info(
        f"first--{config['user']['last_login'] != datetime.now().strftime('%Y-%m-%d')}")
    # Cookie is treated as valid for one calendar day: re-login when it was
    # not obtained today.
    if config['user']['last_login'] != datetime.now().strftime("%Y-%m-%d"):
        op.login()
        config = op.load_config()
        cookie = config['user']['cookie']
    logger.info(f"second--{cookie}")
    headers = {
        'Cookie': cookie,
    }
    # res = requests.post(url, headers=headers, data=data)
    # res = httpx.post(url, headers=headers, data=data, timeout=30)
    res = await fetch_html(url, headers, data)
    logger.info(f"first-res-{res}")
    logger.info(f"first-res-{res.text}")
    logger.info(f"first-res-{res.url}")

    # The server redirects to the login page when the cookie is invalid —
    # detect that by title or final URL, log in again, and retry once.
    if "<title>TAGTIME Inc ERP Login Page</title>" in res.text or res.url == "http://103.231.253.120/rgerp/login.php":
        print('重新获取cookie')
        op.login()
        config = op.load_config()
        headers["Cookie"] = config['user']['cookie']
        res = await fetch_html(url, headers, data)
    logger.info(f"second-res-{res}")
    return res


async def sendMail(title, msg):
    '''Send an HTML notification email via QQ SMTP (implicit TLS, port 465).

    :param title: subject line, also rendered as the <h2> heading in the body.
    :param msg: message text placed inside a <p> element.
    '''
    # SECURITY NOTE(review): sender address and password are hardcoded below —
    # they should be moved to configuration / environment variables.
    sender = '2504880498@qq.com'
    to_list = [
        # '1610546766@qq.com',
        # 'tim@tagtimeasia.com',
        'ttcop@tagtimeasia.com'
    ]
    subject = title
    em = MIMEMultipart()
    em['subject'] = subject
    em['From'] = sender
    em['To'] = ",".join(to_list)

    # Build the HTML body
    html_content = f"""
    <html>
      <body>
        <h2 style='text-align: center;'>{title}</h2>
        <p>{msg}</p>
      </body>
    </html>
    """
    content = MIMEText(html_content, 'html')
    em.attach(content)

    max_retries = 1  # maximum attempts; NOTE(review): with 1, the disconnect branch below never actually retries
    retry_interval = 30  # seconds to wait between retries

    for retry in range(1, max_retries + 1):
        try:
            print(f"第 {retry} 次尝试连接服务器")
            # Send asynchronously with aiosmtplib
            async with aiosmtplib.SMTP(hostname='smtp.qq.com', port=465, use_tls=True) as smtp:
                await smtp.login(sender, 'znpkudlerqdnebbd')
                await smtp.send_message(em)
            print("发送邮件成功")
            await asyncio.sleep(6)  # brief pause after a successful send
            break  # sent successfully — leave the retry loop
        except aiosmtplib.errors.SMTPServerDisconnected:
            print(f"连接服务器失败，等待 {retry_interval} 秒后重试...")
            await asyncio.sleep(retry_interval)
        except Exception as e:
            print(f"发送邮件失败：{e}")
            break  # non-retryable failure — give up

# Connection pool for the `rc` MySQL database.
# SECURITY NOTE(review): credentials are hardcoded here — consider moving them
# to configuration / environment variables.
pool_rc = PooledDB(
    mysql.connector,  # DB-API module used to create connections
    maxconnections=33,  # maximum connections the pool will ever hand out
    mincached=16,  # connections opened eagerly when the pool is created
    maxcached=33,  # maximum idle connections kept cached in the pool
    blocking=True,  # block callers until a connection frees up, instead of raising

    # host='192.168.1.251',
    # user='root',
    # password='1qaz!QAZ',
    # database='qb_database',

    host='192.168.1.129',
    user='root',
    password='Sushile123.',
    database='rc',

    charset='utf8mb4'  # character set for all pooled connections
)


# Obtain a pooled database connection
def conn_db_rc(dictionary=False):
    """Borrow a connection from the rc pool and open a cursor on it.

    :param dictionary: when True, the cursor returns rows as dicts.
    :return: (connection, cursor) — the caller is responsible for closing both.
    """
    connection = pool_rc.connection()
    return connection, connection.cursor(dictionary=dictionary)


def log_to_mysql(func):
    """
    Decorator: run the wrapped (sync or async) task and persist one row to the
    ``function_logs`` MySQL table (name, start time, arguments, result, error).

    When the first positional arg is a task dict, its ``task_name`` is logged
    instead of the function's own name; with two positional args, only the
    trailing task dict is kept and forwarded to the wrapped function.

    Bug fixed vs. the original: if ``conn_db_rc()`` raised, the inner
    ``finally`` referenced an unbound ``connection`` (NameError); and
    ``json.dumps`` on non-JSON-serializable arguments (e.g. datetime) crashed
    the whole call before the task even ran.
    """
    @wraps(func)
    async def wrapper(*args, **kwargs):
        execution_time = datetime.now()
        function_name = func.__name__
        # Task dicts carry their own display name; prefer it when present.
        if len(args) == 1:
            function_name = args[0]["task_name"]
        elif len(args) == 2:
            # Keep only the trailing task dict; the leading arg is discarded.
            args = [args[-1]]
            function_name = args[0]["task_name"]
        # default=str keeps argument logging from crashing on values like datetime.
        arguments = json.dumps({'args': args, 'kwargs': kwargs}, default=str)
        result = None
        error = None

        try:
            if asyncio.iscoroutinefunction(func):
                result = await func(*args, **kwargs)  # await async tasks
            else:
                result = func(*args, **kwargs)
            return result
        except Exception as e:
            error = str(e)
            result = f"任务：{function_name}执行失败"
            # NOTE(review): returns the Exception instance instead of raising;
            # callers appear to rely on this — confirm before changing.
            return Exception(e)
        finally:
            # Record the outcome to MySQL regardless of success or failure.
            connection = None
            cursor = None
            try:
                connection, cursor = conn_db_rc()
                cursor.execute(
                    "INSERT INTO function_logs (function_name, execution_time, arguments, result, error) VALUES (%s, %s, %s, %s, %s)",
                    (function_name, execution_time,
                     arguments, result, error)
                )
                connection.commit()
            except mysql.connector.Error as err:
                logger.error(f"Error: {err}")
            finally:
                # Guard against conn_db_rc() having failed before assignment.
                if cursor:
                    cursor.close()
                if connection:
                    connection.close()
    return wrapper


def parse_quartz_cron(cron_expr):
    """Parse a Quartz cron expression into a per-field dict.

    Fields, in order: second, minute, hour, day, month, day_of_week, year.
    '*' leaves the field as None; '?' is stored verbatim; 'L' becomes 'last'
    for the day field and '*' elsewhere; any other token is stored as-is.

    :param cron_expr: space-separated Quartz cron string (up to 7 parts).
    :return: dict mapping field name -> parsed token (or None).
    """
    fields = ('second', 'minute', 'hour',
              'day', 'month', 'day_of_week', 'year')
    parsed = dict.fromkeys(fields)

    for position, token in enumerate(cron_expr.split(" ")):
        name = fields[position]
        if token == '?':
            # Quartz "no specific value" — typically day vs day_of_week.
            parsed[name] = '?'
        elif token == 'L':
            # "last": meaningful for day-of-month; neutralized elsewhere.
            parsed[name] = 'last' if name == 'day' else '*'
        elif token != '*':
            parsed[name] = token
    return parsed


def check_cron(cron: str):
    """Validate/normalize a cron string to the 7-part Quartz form.

    A 4-part expression (minute hour day month) is padded with a leading
    "00" seconds field and a trailing "? *" (day_of_week, year). A 7-part
    expression is accepted as-is; any other length is rejected.

    :param cron: space-separated cron expression.
    :return: (is_valid, normalized_cron) tuple.
    """
    part_count = len(cron.split(" "))
    if part_count == 4:
        return True, "00 " + cron + " ? *"
    if part_count == 7:
        return True, cron
    return False, cron


def clear_old_compressed_files(folder_path):
    """Delete yesterday-dated files from `folder_path`.

    Removes files matching ``QR-2d {YYYY-MM-DD}.*`` and ``{YYYY-MM-DD}.*``
    where the date is yesterday's. Each deletion is logged; failures are
    logged and skipped.

    :param folder_path: directory to scan for matching files.
    """
    # Yesterday's date, formatted YYYY-MM-DD
    stamp = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d')

    # Collect every file matching either dated pattern
    targets = []
    for pattern in (f'QR-2d {stamp}.*', f'{stamp}.*'):
        targets.extend(glob.glob(os.path.join(folder_path, pattern)))

    # Remove them one by one, logging each outcome
    for file_path in targets:
        try:
            os.remove(file_path)
            logger.info(f"Deleted: {file_path}")
        except Exception as e:
            logger.error(f"Error deleting {file_path}: {e}")
