import datetime
import json
import os
import time
import random
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ProcessPoolExecutor
from sqlalchemy import desc
from email.mime.text import MIMEText
from email.header import Header
import smtplib
import models
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from fastapi import APIRouter, Depends
from fastapi.encoders import jsonable_encoder
from jose import jwt
from sqlalchemy.orm import Session

from models.db import get_db, SessionLocal
from models.system import System
from views.utils.log import logger

from config import interpreter
from schemas import response_success, response_error
from schemas.scheduler import TaskSchema, TaskUpdateSchema
from views import SECRET_KEY, ALGORITHM, oauth2_scheme, get_current_user_name
from views.utils.toolkit_service import python_insert_args, shell_insert_args

# Two names for the same router: `router` is used by the decorators below,
# `scheduler_router` is presumably what the app imports to include it.
router = scheduler_router = APIRouter()


@router.post("/")
async def create(form: TaskSchema, db: Session = Depends(get_db), token: str = Depends(oauth2_scheme)):
    """Create a new scheduled task owned by the currently authenticated user.

    The task starts with status=2; ownership is derived from the JWT token.
    """
    new_task = models.Task(
        name=form.name,
        script=form.script,
        cron=form.cron,
        status=2,
        args=form.args or [],
        recount=form.recount,
        enable=form.enable,
        delay=form.delay,
        username=get_current_user_name(token),
    )
    db.add(new_task)
    db.commit()
    return response_success("新增成功")


@router.get("/")
async def get_list(db: Session = Depends(get_db)):
    """List all tasks, each enriched with the name of the toolkit script it runs.

    Returns a response whose ``list`` items are the Task fields plus a
    ``script_name`` key taken from the joined Toolkit row.
    """
    db_data = db.query(models.Task, models.Toolkit.name).join(models.Toolkit,
                                                              models.Task.script == models.Toolkit.id).all()
    # Build the response payload. jsonable_encoder (unlike vars()) skips
    # SQLAlchemy internals such as _sa_instance_state and returns a fresh
    # dict instead of aliasing the ORM instance's __dict__.
    result = []
    for task, toolkit_name in db_data:
        task_data = jsonable_encoder(task)
        task_data['script_name'] = toolkit_name
        result.append(task_data)

    return response_success(data={'list': result})


@router.put("/")
async def update(form: TaskUpdateSchema, db: Session = Depends(get_db)):
    """Partially update a task: only non-None form fields are written.

    Password-type args are JWT-encrypted before storage; values that are
    already encrypted (length >= 60) are left untouched so they are not
    double-encoded.
    """
    if form.args:
        for args_item in form.args:
            # Encrypt plain-text password values only.
            if args_item['type'] == 'password' and len(args_item['value']) < 60:
                args_item['value'] = jwt.encode({'sub': args_item['value']}, SECRET_KEY,
                                                algorithm=ALGORITHM)

    # Skip None values so a partial update does not blank out columns.
    update_data = {k: v for k, v in form.dict().items() if v is not None}

    db_data = db.query(models.Task).filter_by(id=form.id).first()
    if not db_data:
        return response_error(msg="该任务不存在！")
    for k, v in update_data.items():
        if k != 'id':
            setattr(db_data, k, v)
    # Bump m_time so the periodic sync loop re-registers the scheduler job.
    db_data.m_time = datetime.datetime.now()
    db.commit()
    return response_success("更新成功")


@router.delete("/{id}")
async def delete(id: str, db: Session = Depends(get_db)):
    """Delete the task identified by the path parameter ``id``."""
    target = db.query(models.Task).filter_by(id=id).first()
    if target is None:
        return response_error(msg="该任务不存在！")
    db.delete(target)
    db.commit()
    return response_success("删除成功")


@router.get("/histories")
async def get_log(page: int, page_size: int, task_id: int = None, db: Session = Depends(get_db)):
    """Return one page of task execution histories, newest first.

    ``page`` is 0-based; ``page_size`` must be greater than 5 (existing
    validation kept as-is). An optional ``task_id`` restricts the result
    to a single task's history. The response carries the page items and
    the total (unpaged) row count.
    """
    if page < 0 or page_size <= 5:
        return response_error('参数错误')

    query = db.query(models.TaskHistory)
    if task_id:
        query = query.filter_by(task_id=task_id)

    histories = (query.order_by(desc(models.TaskHistory.create_time))
                 .offset(page * page_size).limit(page_size).all())
    total = query.count()
    return response_success(data={'list': jsonable_encoder(histories), 'total': total})


# Module-level shared state.
# script_queue: not referenced anywhere in this file — presumably reserved
# for future use or used by another module; verify before removing.
# context: maps a creation id (time_ns, assigned in sync_tasks) to the
# scheduled job and its task metadata, letting run_script look up its own
# job (e.g. next_run_time).
script_queue = {}
context = {}


def send_email(subject, content, receiver):
    """Send a plain-text notification mail via QQ SMTP over SSL.

    Args:
        subject: mail subject line.
        content: plain-text body.
        receiver: destination address.

    SMTP failures are logged and swallowed so a mail problem never breaks
    the calling task flow.
    """
    # SECURITY: credentials are hard-coded; they should be moved into
    # configuration / environment variables.
    smtp_server = 'smtp.qq.com'
    username = '894193847@qq.com'
    password = 'qzegnmjxxdkrbehj'

    # Sender identity.
    sender = '894193847@qq.com'

    # Build the MIMEText message and its headers.
    message = MIMEText(content, 'plain', 'utf-8')
    message['From'] = Header(sender)
    message['To'] = Header(receiver)
    message['Subject'] = Header(subject)

    try:
        # Context manager guarantees the connection is closed even if
        # login or sendmail raises (the old code leaked it on failure).
        with smtplib.SMTP_SSL(smtp_server, 465) as smtp_obj:
            smtp_obj.login(username, password)
            smtp_obj.sendmail(sender, receiver, message.as_string())
        # f-string instead of print-style extra args, which the logger
        # would drop or choke on.
        logger.info(f"邮件发送成功: {subject}")
    except smtplib.SMTPException as e:
        logger.error(f"邮件发送失败: {e}")


def run_script(data, c_id):
    """Execute a task's toolkit script in a forked pty child, with retries.

    Args:
        data: jsonable-encoded Task row (id, name, script, args, delay, ...).
        c_id: key into the module-level ``context`` mapping, used to read
            the APScheduler job's next_run_time after each attempt.

    Every attempt's captured output and exit status are persisted as a
    TaskHistory row; after the final failed attempt an email notice is
    sent if configured in the System table.
    """
    import pty

    # Optional random start delay configured as "start-end" (seconds).
    # Guarded: the old code crashed on a missing value (None.split) or a
    # value without '-' (IndexError on the second element).
    delay_cfg = data.get('delay')
    if delay_cfg and '-' in delay_cfg:
        start_s, _, end_s = delay_cfg.partition('-')
        try:
            start, end = int(start_s), int(end_s)
        except ValueError:
            start = end = 0  # malformed config: skip the delay
        if start != 0 and end != 0 and start < end:
            random_number = random.randint(start, end)
            logger.info(f"任务 {data['name']} 延迟{random_number}秒后执行")
            time.sleep(random_number)

    db = SessionLocal()
    try:
        db_task = db.query(models.Task).filter_by(id=data['id']).first()
        db_tool = db.query(models.Toolkit).filter_by(id=data['script']).first()
        if not db_tool or not db_task:
            return False

        now_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        run_name = f"{db_task.name}_{now_time}"
        cache_dir = os.sep.join([os.getcwd(), 'cache', 'toolkit'])

        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)

        if not db_tool.content:
            return response_error(msg='没有代码可供执行')
        run_args = json.loads(db_tool.args) if db_tool.args else []
        # Fill the script's declared args with the task's configured values.
        if (args := data.get('args')) and run_args:
            for item in run_args:
                if item['id'] not in args:
                    continue

                item['value'] = args[item['id']]

        content_args = ''
        if run_args:
            if db_tool.type == 1:  # shell
                content_args = shell_insert_args(run_args)
            elif db_tool.type == 2:  # python
                content_args = python_insert_args(run_args)

        content_body = f"{content_args}\n{db_tool.content}"

        # tool type -> (script filename, interpreter binary)
        interpreter_enum = {
            1: (run_name + '.sh', '/usr/bin/bash'),
            2: (run_name + '.py', interpreter),
        }
        script = f"{cache_dir}{os.sep}{interpreter_enum[db_tool.type][0]}"

        with open(script, 'w+', encoding='utf-8') as f:
            f.write(content_body)

        exec_user = db_tool.exec_user
        # Command to execute.
        command = [interpreter_enum[db_tool.type][1], script]
        workspace = db_tool.workspace
        # Normalize CRLF line endings which would break shebang/exec.
        os.system(f'dos2unix {script}')

        if exec_user:
            import pwd
            os.chown(script, pwd.getpwnam(exec_user).pw_uid, pwd.getpwnam(exec_user).pw_gid)

        retry = 0
        # recount = retries after failure; total attempts = recount + 1.
        recount = db_task.recount if db_task.recount else 0
        recount += 1

        while retry < recount:
            pid, fd = pty.fork()
            if pid == 0:
                if workspace:
                    os.chdir(workspace)
                if exec_user:
                    os.setuid(pwd.getpwnam(exec_user).pw_uid)
                # Child: replace the process image with the command.
                os.execvp(command[0], command)
            else:
                history = b''
                # Parent: drain the pty until the child closes its end.
                try:
                    while True:
                        try:
                            read_bytes = os.read(fd, 1024)
                            if not read_bytes:
                                break
                            history += read_bytes
                        except OSError:
                            # On Linux the pty read raises EIO at EOF.
                            break
                finally:
                    # Make sure the fd is closed.
                    os.close(fd)

            # Wait for the child and collect its exit status.
            _, exit_status = os.waitpid(pid, 0)

            task_history = models.TaskHistory(task_id=db_task.id,
                                              task_name=db_task.name,
                                              script_name=db_tool.name,
                                              script_id=db_tool.id,
                                              create_time=datetime.datetime.now(),
                                              username=db_task.username,
                                              status=exit_status,
                                              content=history)

            db.add(task_history)
            db_task.next_time = context[c_id]['job'].next_run_time
            db_task.last_status = exit_status
            db.commit()

            if exit_status == 0:
                break

            if retry + 1 == recount:
                # Last attempt failed: send the configured email notice.
                system = db.query(System).first()
                try:
                    email = json.loads(system.config)['notice']['email']
                    if email['on']:
                        send_email('任务执行失败',
                                   f"{db_task.name} {db_tool.name} {datetime.datetime.now()} 执行失败: \n{history}",
                                   email['account'])
                # Broadened: a missing 'notice'/'email' key (KeyError) or
                # absent System row (AttributeError) must not crash either.
                except (ValueError, KeyError, TypeError, AttributeError):
                    logger.error('邮件通知配置错误！')

            if retry + 1 < recount:
                time.sleep(30)

            retry += 1

        logger.info(f"{data['name']} task is done")
    finally:
        # Always release the session, including on the early returns above
        # (the old code leaked it when the task or tool was missing).
        db.close()


# NOTE(review): jobstores is defined but never passed to the scheduler
# below, so jobs live in memory only and are rebuilt by sync_tasks —
# confirm whether SQLite persistence was actually intended.
jobstores = {
    'default': SQLAlchemyJobStore(url='sqlite:///database.db')  # job store backed by SQLite via SQLAlchemy
}
executors = {
    'default': {'type': 'threadpool', 'max_workers': 30},  # thread pool, up to 30 workers
    'processpool': ProcessPoolExecutor(max_workers=10)  # process pool, up to 10 workers
}

# Scheduler bound to the asyncio event loop (runs inside the FastAPI app).
scheduler = AsyncIOScheduler(executors=executors)


def scheduler_start():
    """Start the async scheduler and register the periodic task-sync job."""
    scheduler.start()
    logger.info('Start async scheduler')
    # Reconcile DB tasks with scheduler jobs every 10 seconds.
    scheduler.add_job(sync_tasks, 'interval', name='__sync_tasks', seconds=10)


def sync_tasks():
    """Reconcile APScheduler jobs with the Task table (runs every 10s).

    A job is identified by the name ``"<task.name>&<int(m_time)>"``, so
    editing a task bumps m_time, which removes the stale job here and
    registers a fresh one.
    """
    db = SessionLocal()
    db_tasks = db.query(models.Task).all()

    # Remove jobs whose task no longer exists or was modified since.
    for job in scheduler.get_jobs():
        if job.name == '__sync_tasks':
            continue
        # rpartition splits on the LAST '&', tolerating '&' inside the
        # task name itself (plain split('&') raised ValueError there).
        job_name, sep, m_time = job.name.rpartition('&')
        if not sep:
            continue  # not a job this module created
        matched = [t for t in db_tasks
                   if t.name == job_name and int(t.m_time.timestamp()) == int(m_time)]
        if not matched:
            logger.info(f"Successfully deleted task '{job.name}'")
            job.remove()

    # Register jobs for enabled tasks that are not scheduled yet.
    for task in db_tasks:
        if not task.enable:
            continue
        timestamp = int(task.m_time.timestamp())
        expected_name = f"{task.name}&{timestamp}"
        # Exact-name comparison; the old substring test could match a
        # different task whose name overlapped this one.
        if any(j.name == expected_name for j in scheduler.get_jobs()):
            continue

        task_dict = jsonable_encoder(task)
        c_id = time.time_ns()
        minute, hour, day, month, week = task.cron.split()
        job = scheduler.add_job(run_script, 'cron', name=expected_name,
                                minute=minute, hour=hour, day=day, month=month, day_of_week=week,
                                timezone='Asia/Shanghai', args=(task_dict, c_id))
        # Stash job metadata so run_script can read next_run_time.
        context[c_id] = {
            'job': job,
            'task': task,
            'retry': 0,
            'trigger': job.trigger
        }

        task.next_time = job.next_run_time
        logger.info(f"Successfully created task '{job.name}'")

    db.commit()
    db.close()
