import ast
import asyncio
import os
import subprocess
import time
import uuid

import pymysql
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from loguru import logger

from config import DOMAIN, alluredir
from sql import MySQLHelper
from wechat_dingding_bot_api import WeixinRobot, DingdingRobot


# Shared async MySQL helper; reused by every task in this module.
db = MySQLHelper()


async def run_case_(project_id, project_name, casedata):
    """Run the pytest suite for one project and record/notify the outcome.

    Args:
        project_id: primary key of the ``project`` row; used to look up the
            thread count and notification settings.
        project_name: display name used in logs and notification messages.
        casedata: dict with keys ``case_id`` (ids forwarded to pytest),
            ``report_id`` (the ``test_report`` row to update) and
            ``run_environment`` (1/2 selects test_01.py, else test_02.py).
    """
    logger.info(f'开始执行 {project_name}')
    # Timestamp + uuid keeps report directories unique even when several
    # runs start within the same second.
    timestamp = time.strftime('%Y%m%d%H%M%S')
    unique_id = uuid.uuid4().hex
    allure_dir = alluredir(f"{timestamp}_{unique_id}")
    # state = 3: mark the report as "running" before the suite starts.
    update_state = "update test_report set state = 3, path = %s where id = %s;"
    await db.execute_update(update_state, (allure_dir, casedata['report_id']))
    os.makedirs(allure_dir, exist_ok=True)

    select_project = 'select notice_type, threads, notice_token, notice_secret from project where id = %s;'
    notice = await db.execute_query(select_project, (project_id,))
    if not notice:
        # Fix: the original indexed notice[0] unconditionally and raised
        # IndexError when the project row had been deleted in the meantime.
        logger.error(f'未找到项目配置: project_id={project_id}')
        return
    project_cfg = notice[0]

    test_file = './tests/test_01.py' if casedata['run_environment'] in [1, 2] else './tests/test_02.py'
    pytest_cmd = [
        'pytest', test_file,
        '-v',
        f'--alluredir={allure_dir}',
        f'--case_ids={casedata["case_id"]}',
        '--reruns', '2',
        '-n', f'{project_cfg["threads"]}',  # pytest-xdist worker count
        '--dist', 'load'  # load-balanced distribution across workers
    ]

    # NOTE(review): subprocess.run blocks the event loop for the entire
    # pytest run — consider asyncio.create_subprocess_exec if other
    # scheduled jobs must stay responsive; confirm this is acceptable.
    result = subprocess.run(pytest_cmd, shell=False)
    # shell=True is needed for the '&&' chain; allure_dir is generated
    # locally (timestamp + uuid), so the string contains no untrusted input.
    command = f"allure generate {allure_dir} -o {allure_dir}/reports && allure-combine {allure_dir}/reports --dest {allure_dir}/reports"
    subprocess.run(command, shell=True, capture_output=True)

    exit_code = result.returncode
    path = f'{DOMAIN}' + allure_dir.replace('/home', '') + '/reports/complete.html#behaviors'
    logger.info(f'exit_code: {exit_code}')
    if exit_code != 0:
        # state = 2: finished with failures.
        update_test_result = "update test_report set state = 2, path = %s where id = %s;"
        await db.execute_update(update_test_result, (path, casedata['report_id']))

        if project_cfg['notice_type'] == 1:
            DingdingRobot(project_cfg['notice_token'], project_cfg['notice_secret']).action_card(
                f'{project_name} 运行报错',
                f"![screenshot](https://gitee.com/shangguanyi666/picture/raw/master/%E8%BF%90%E8%A1%8C%E5%A4%B1%E8%B4%A5.png) \n\n #### {project_name} 运行报错 \n\n 请通过测试报告查看详情",
                f"{DOMAIN}",
                f"{path}"
            )
        elif project_cfg['notice_type'] == 2:
            WeixinRobot(project_cfg['notice_token']).img_text_api(
                f"{project_name} 运行报错",
                path,
                'https://gitee.com/shangguanyi666/picture/raw/master/%E8%BF%90%E8%A1%8C%E5%A4%B1%E8%B4%A5.png',
                '请通过测试报告查看详情')
        else:
            logger.info('无通知方式')
    else:
        # state = 1: finished successfully.
        update_test_result = "update test_report set state = 1, path = %s where id = %s;"
        await db.execute_update(update_test_result, (path, casedata['report_id']))


async def process_data_task(item):
    """Create a test_report row for a project's active cases and run them.

    Args:
        item: project row with at least ``id``, ``name`` and ``type``
            (``type`` is forwarded to run_case_ as the run environment).
    """
    logger.info(f'处理项目: {item["name"]}')
    select_case = 'select test_case.id from project, test_case where project.id=test_case.project_id and test_case.delete_time is null and test_case.state =1 and project.id=%s;'
    # Consistency fix: pass query parameters as a tuple like every other
    # call in this module.
    rows = await db.execute_query(select_case, (item['id'],))
    case_list = [row['id'] for row in rows]
    logger.info(f'case_list: {case_list}')
    if not case_list:
        # No active cases for this project — nothing to run.
        return

    create_time = time.strftime("%Y-%m-%d %H:%M:%S")
    query_existing = "SELECT id FROM test_report WHERE case_id = %s AND title = %s AND project_name = %s AND create_time = %s;"
    data = (str(case_list), '定时任务', item['name'], create_time)
    existing_record = await db.execute_query(query_existing, data)
    if not existing_record:
        logger.info(f'记录不存在，插入: {data}')
        try:
            add_test_result = "INSERT INTO test_report (case_id, title, project_name, create_time) VALUES (%s, %s, %s, %s);"
            await db.execute_update(add_test_result, data)
        except pymysql.IntegrityError as e:
            logger.error(f'插入数据失败，原因: 唯一约束违规. 数据: {data}. 错误信息: {e}')
        except Exception as e:
            logger.error(f'插入数据失败: {data}. 错误信息: {e}')
    else:
        logger.info(f'记录已存在，未重复插入: {existing_record}')

    # state = 0: pick up only the not-yet-started report row.
    query_test_result = "SELECT id, case_id FROM test_report WHERE case_id = %s AND title = %s AND project_name = %s AND state = 0 AND create_time = %s;"
    test_result_id = await db.execute_query(query_test_result, data)
    logger.info(f'查询测试结果: {test_result_id}')
    if not test_result_id:
        logger.error("未找到对应的测试结果")
        return
    result = {
        # Security fix: case_id is a DB-stored string like "[1, 2]";
        # parse it with ast.literal_eval instead of eval().
        'case_id': ast.literal_eval(test_result_id[0]['case_id']),
        'report_id': test_result_id[0]['id'],
        'run_environment': item['type'],
    }
    await run_case_(item['id'], item['name'], result)


async def check_data_task(scheduler):
    """Synchronise APScheduler jobs with the ``project.timing`` column.

    Adds or refreshes one cron job per project that has a timing
    expression, and removes jobs for projects that no longer have one.

    Args:
        scheduler: the running AsyncIOScheduler instance to mutate.
    """
    select_project = 'SELECT id, name, type, timing FROM project WHERE delete_time IS NULL AND timing IS NOT NULL AND timing <> "";'
    data = await db.execute_query(select_project)

    # Jobs this module currently manages (namespaced by the id prefix).
    existing_job_ids = {job.id for job in scheduler.get_jobs() if job.id.startswith('process_data_task_')}

    if not data:
        logger.info('data不存在')
        # No project has a timing expression: drop every managed job.
        for job_id in existing_job_ids:
            if scheduler.get_job(job_id):
                scheduler.remove_job(job_id)
                logger.info(f"已取消任务 {job_id}")
        return

    logger.info('data存在')
    current_job_ids = set()
    for item in data:
        job_id = f"process_data_task_{item['id']}"
        cron_expression = item['timing']
        logger.info(f"处理项目: {item['name']} 使用 cron 表达式: {cron_expression}")

        try:
            # Turn the stored crontab string into a scheduler trigger.
            trigger = CronTrigger.from_crontab(cron_expression)
        except ValueError as e:
            logger.error(f"Cron表达式无效: {cron_expression}, 错误: {e}")
            continue

        current_job_ids.add(job_id)
        # replace_existing=True collapses the original remove-then-add /
        # add-new branches (which were identical) into a single call.
        scheduler.add_job(process_data_task, trigger, args=[item], id=job_id, replace_existing=True)
        logger.info(f"已调度任务 {job_id} 使用 cron 表达式: {cron_expression}")

    # Cancel jobs whose project no longer appears in the query result.
    for job_id in existing_job_ids - current_job_ids:
        if scheduler.get_job(job_id):
            scheduler.remove_job(job_id)
            logger.info(f"已取消任务 {job_id}")


# Scheduler bootstrap: poll the database every 30 minutes and keep the
# per-project cron jobs in sync with the `project.timing` column.
if __name__ == '__main__':
    scheduler = AsyncIOScheduler()
    # check_data_task receives the scheduler itself so it can add/remove jobs.
    scheduler.add_job(check_data_task, IntervalTrigger(minutes=30), args=[scheduler])
    try:
        scheduler.start()
        # NOTE(review): asyncio.get_event_loop() outside a running loop is
        # deprecated since Python 3.10 — consider an async main() with
        # asyncio.run(); confirm the target Python version before changing.
        asyncio.get_event_loop().run_forever()
    except (KeyboardInterrupt, SystemExit):
        pass
    finally:
        scheduler.shutdown()
