import random
from datetime import datetime, timedelta

from sqlalchemy import asc
from sqlalchemy import func
from sqlalchemy.orm.session import Session

from airflow.patsnap.util.dag_util import workflow_id_from_dag_id
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.jobs.local_task_job import LocalTaskJob
from airflow.kubernetes.kube_client import get_kube_client
from airflow.models.dagpickle import DagPickle
from airflow.models.dagrun import DagRun
from airflow.models.idata_task import Task, OperatorType
from airflow.models.idata_workflow import WorkFlow
from airflow.models.idata_workflow_task import WorkFlowTask
from airflow.models.log import Log
from airflow.models.taskinstance import TaskInstance
from airflow.utils import dates
from airflow.utils import timezone
from airflow.utils.session import provide_session
from airflow.utils.state import State
from airflow.utils.types import DagRunType

# Cron templates per schedule-interval key. Placeholders are filled with a
# randomly chosen minute (and hour, for non-hourly intervals) by
# get_default_cron() to spread scheduling load across the hour/day.
cron_sets = {
    'hourly': '{} * * * *',
    'daily': '{} {} * * *',
    'weekly': '{} {} * * 0',
    'monthly': '{} {} 1 * *',
    'quarterly': '{} {} 1 */3 *',
    'yearly': '{} {} 1 1 *',
}

# Candidate minute values (every 3 minutes, 0-45) for the cron templates above.
minutes = [0, 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45]
# Candidate hour values (00:00-03:59) for daily and coarser intervals.
hours = [0, 1, 2, 3]

# UI options for the schedule-interval picker; 'label' is the user-facing
# (Chinese) text, 'value' is the cron_sets key.
scheduler_interval = [
    {
        'label': '每小时',
        'value': 'hourly'
    },
    {
        'label': '每天',
        'value': 'daily'
    },
    {
        'label': '每周',
        'value': 'weekly'
    },
    {
        'label': '每月',
        'value': 'monthly'
    }
]


def get_default_cron(key):
    """Build a default cron expression for a schedule-interval key.

    The minute (and, for non-hourly intervals, the hour) slot is picked at
    random from the module-level ``minutes``/``hours`` pools so that many
    workflows do not all fire at the same instant.

    :param key: one of the ``cron_sets`` keys ('hourly', 'daily', ...).
    :return: a cron expression string, or ``None`` for an unknown key
        (made explicit here; previously an implicit fall-through).
    """
    if key not in cron_sets:
        return None
    if key == 'hourly':
        return cron_sets[key].format(random.choice(minutes))
    return cron_sets[key].format(random.choice(minutes), random.choice(hours))


@provide_session
def create_dag_run(dag_id, execution_date: datetime, session: Session = None, run_type=DagRunType.MANUAL):
    """Create a RUNNING dag run for ``dag_id`` from its pickled DAG.

    :param dag_id: id of the dag whose pickle is looked up.
    :param execution_date: logical execution date of the new run.
    :param run_type: dag-run type, externally triggered MANUAL by default.
    :raises AirflowException: when no DagPickle exists for the dag, i.e. the
        workflow has not been serialized/saved yet.
    """
    pickled = DagPickle.get_by_dag(dag_id, session=session)
    if pickled is None:
        raise AirflowException('工作流未序列化， 请确认已正确保存')

    pickled.pickle.create_dagrun(
        run_type=run_type,
        execution_date=execution_date,
        start_date=timezone.utcnow(),
        state=State.RUNNING,
        external_trigger=True,
        session=session,
        dag_hash=pickled.pickle_hash,
        creating_job_id=0,
    )


@provide_session
def diagnose_dagrun(dag_id, execution_date: datetime, session: Session = None):
    """Collect diagnostic information for a single dag run.

    Gathers the run's scheduling-decision snapshot, its LocalTaskJob records,
    the audit-log events for this (dag_id, execution_date), and — K8s executor
    only — the pods labelled with the dag id.

    :return: a dict with keys 'dag_run', 'task_jobs', 'events', 'pods',
        or ``None`` when no matching dag run exists.
    """
    data = dict()
    dr = session.query(DagRun).filter(DagRun.dag_id == dag_id, DagRun.execution_date == execution_date).first()
    if dr is None:
        return None
    # Re-attach the pickled DAG so scheduling decisions can be recomputed.
    dp = DagPickle.get_by_dag(dag_id, session=session)
    dr.dag = dp.pickle
    info = dr.task_instance_scheduling_decisions(session)
    schedulable_tis = [ti.to_json() for ti in info.schedulable_tis]
    changed_tis = info.changed_tis
    finished_tasks = [ti.to_json() for ti in info.finished_tasks]
    unfinished_tasks = [ti.to_json() for ti in info.unfinished_tasks]

    dag = dr.get_dag()
    tis = info.tis
    # Leaf task instances are the ones that decide the run's terminal state.
    leaf_task_ids = {t.task_id for t in dag.leaves}
    leaf_tis = [ti.to_json() for ti in tis if ti.task_id in leaf_task_ids]
    data['dag_run'] = {
        'dag_name': dr.dag_name,
        'state': dr.get_state(),
        'external_trigger': dr.external_trigger,
        'run_type': dr.run_type,
        'start_date': dr.start_date.isoformat() if dr.start_date else None,
        'end_date': dr.end_date.isoformat() if dr.end_date else None,
        # BUGFIX: last_scheduling_decision can still be NULL for a fresh run;
        # guard it like the other timestamps instead of raising AttributeError.
        'last_scheduling_decision':
            dr.last_scheduling_decision.isoformat() if dr.last_scheduling_decision else None,
        'scheduling_decisions_info': {
            'schedulable_tis': schedulable_tis,
            'changed_tis': changed_tis,
            'finished_tasks': finished_tasks,
            'unfinished_tasks': unfinished_tasks,
            'leaf_tis': leaf_tis
        }
    }

    # Map job_id -> task_id so each LocalTaskJob row can be labelled with the
    # task instance that produced it.
    jmap = dict()
    for ti in info.tis:
        jmap[ti.job_id] = ti.task_id
    jid = list(jmap.keys())
    task_jobs = session.query(LocalTaskJob).filter(LocalTaskJob.id.in_(jid)).all()
    data['task_jobs'] = [{'state': job.state, 'start_date': job.start_date.isoformat() if job.start_date else None,
                          'end_date': job.end_date.isoformat() if job.end_date else None,
                          'latest_heartbeat': job.latest_heartbeat.isoformat() if job.latest_heartbeat else None,
                          'dag_id': job.dag_id,
                          'job_type': job.job_type,
                          'task_id': jmap[job.id]} for job in task_jobs]

    # Audit-log events for this run, oldest first.
    logs = session.query(Log).filter(Log.dag_id == dag_id, Log.execution_date == execution_date).order_by(asc(Log.dttm))
    events = [{'dag_id': log.dag_id,
               'task_id': log.task_id,
               'event': log.event,
               'dttm': log.dttm.isoformat(),
               'execution_date': log.execution_date.isoformat()} for log in logs]
    data['events'] = events

    # Only For K8s executor
    kube_client = get_kube_client()
    pods = kube_client.list_namespaced_pod('idata', label_selector='dag_id={}'.format(dag_id)).to_dict()['items']
    data['pods'] = pods

    return data


@provide_session
def workflow_history(dag_id, session: Session = None, size=20):
    """Return the most recent ``size`` runs of ``dag_id`` as plain dicts.

    Dates are converted to epoch milliseconds for the API layer.
    """
    recent_runs = (
        session.query(DagRun)
        .filter(DagRun.dag_id == dag_id)
        .order_by(DagRun.start_date.desc())
        .limit(size)
        .all()
    )
    history = []
    for run in recent_runs:
        history.append({
            'dag_id': run.dag_id,
            'execution_date': dates.to_milliseconds(run.execution_date),
            'state': run.state,
            'start_date': dates.to_milliseconds(run.start_date),
            'end_date': dates.to_milliseconds(run.end_date),
            'external_trigger': run.external_trigger,
            'run_type': run.run_type,
        })
    return history


@provide_session
def workflow_instances(project_id, page=1, page_size=10, dag_id=None, dag_name=None, state=None, run_type=None,
                       execution_date_from=None, execution_date_to=None, session: Session = None):
    """Return one page of dag runs for a project, newest execution first.

    Optional filters narrow by dag id/name, state, run type and an exclusive
    execution-date window given in epoch milliseconds.

    :return: the paginator's ``to_json()`` dict with 'items' replaced by
        plain dicts (millisecond timestamps plus a computed 'duration' in
        seconds, ``None`` while the run is unfinished).
    """
    qry = session.query(DagRun).filter(DagRun.project_id == project_id).order_by(DagRun.execution_date.desc())
    if dag_id:
        qry = qry.filter(DagRun.dag_id == dag_id)
    if dag_name:
        qry = qry.filter(DagRun.dag_name == dag_name)
    if state:
        qry = qry.filter(DagRun.state == state)
    if run_type:
        qry = qry.filter(DagRun.run_type == run_type)
    if execution_date_from:
        date_from = dates.from_milliseconds(execution_date_from)
        qry = qry.filter(DagRun.execution_date > date_from)
    if execution_date_to:
        date_to = dates.from_milliseconds(execution_date_to)
        qry = qry.filter(DagRun.execution_date < date_to)
    dag_runs_page = qry.paginate(page=page, per_page=page_size, error_out=False)
    results = dag_runs_page.to_json()
    raw = [
        {'dag_id': run.dag_id,
         'execution_date': dates.to_milliseconds(run.execution_date),
         'state': run.state,
         'start_date': dates.to_milliseconds(run.start_date),
         'end_date': dates.to_milliseconds(run.end_date),
         'run_type': run.run_type,
         'workflow_name': run.dag_name,
         'external_trigger': run.external_trigger} for run in results['items']]

    # Dead code removed: a WorkFlow-name lookup query whose result ('names')
    # was never used — the name now comes straight from DagRun.dag_name.
    for it in raw:
        if it['end_date']:
            # start/end are epoch milliseconds; duration is whole seconds.
            it['duration'] = int((it['end_date'] - it['start_date']) / 1000)
        else:
            it['duration'] = None

    results['items'] = raw
    return results


@provide_session
def query_options_by_project(project_id, session: Session = None):
    """List ``{'id', 'name'}`` options for every workflow in the project."""
    rows = session.query(WorkFlow.id, WorkFlow.name).filter(WorkFlow.project_id == project_id).all()
    return [{'id': wf_id, 'name': wf_name} for wf_id, wf_name in rows]


@provide_session
def workflow_validate(workflow_id, data, session=None):
    """Validate a workflow patch payload by applying it to the in-session model.

    :param data: partial update payload (same shape as workflow_patch's).
    :return: an error message string when the payload is invalid, otherwise
        the result of ``workflow.content_error()`` (None when content is OK).

    NOTE(review): validation mutates the session-attached WorkFlow instance
    as a side effect — presumably the caller never commits this session on
    validation failure; confirm.
    """
    # A pure rename or a pure folder move needs no content validation.
    if len(data) == 1 and 'name' in data:
        return None
    if len(data) == 1 and 'folder_id' in data:
        return None
    if 'nodes' not in data.get('content', {}):
        return 'Workflow content 不完整，无法保存'
    workflow = WorkFlow.get_workflow(workflow_id, session=session)
    for key in data.keys():
        if key == 'name' and data[key]:
            workflow.name = data[key]
        if key == 'folder_id' and data[key]:
            workflow.folder_id = data[key]
        if key == 'description':
            workflow.description = data[key]
        if key == 'parent_id':
            # Empty string means "detach from parent".
            if data[key] == '':
                data[key] = None
            workflow.parent_id = data[key]
        if key == 'parent_project':
            workflow.parent_project = data[key]
        if key == 'start_time' and data[key]:
            workflow.start_time = dates.from_milliseconds(data[key])
        # BUGFIX: the 'time_out' branch appeared twice; deduplicated.
        if key == 'time_out':
            workflow.time_out = data[key]
        if key == 'end_time':
            if data[key] == '' or data[key] is None:
                workflow.end_time = None
            else:
                workflow.end_time = dates.from_milliseconds(data[key])
        if key == 'interval' and data[key]:
            workflow.interval = data[key]
        if key == 'cron' and data[key]:
            workflow.cron = data[key]
        if key == 'content' and data[key]:
            workflow.content = data[key]
        if key == 'is_paused' and data[key] is not None:
            # Un-pausing a workflow requires a schedule to exist.
            if not data[key] and workflow.cron is None:
                return 'cron 表达式未设置'
            workflow.is_paused = data[key]
    return workflow.content_error()


@provide_session
def workflow_patch(workflow_id, data, session=None):
    """Apply a partial update payload to a workflow and re-sync its DAG.

    Recognized keys mirror workflow_validate's; timestamps arrive in epoch
    milliseconds. Persists via ``workflow.update`` and then regenerates the
    Airflow DAG with ``workflow.sync_dag``.

    :raises AirflowException: when un-pausing a workflow that has no cron.
    """
    workflow = WorkFlow.get_workflow(workflow_id, session=session)
    for key in data.keys():
        if key == 'name' and data[key]:
            workflow.name = data[key]
        if key == 'folder_id' and data[key]:
            workflow.folder_id = data[key]
        if key == 'description':
            workflow.description = data[key]
        if key == 'parent_id':
            # Empty string means "detach from parent".
            if data[key] == '':
                data[key] = None
            workflow.parent_id = data[key]
        if key == 'parent_project':
            workflow.parent_project = data[key]
        if key == 'start_time' and data[key]:
            workflow.start_time = dates.from_milliseconds(data[key])
        # BUGFIX: the 'time_out' branch appeared twice; deduplicated.
        if key == 'time_out':
            workflow.time_out = data[key]
        if key == 'end_time':
            if data[key] == '' or data[key] is None:
                workflow.end_time = None
            else:
                workflow.end_time = dates.from_milliseconds(data[key])
        if key == 'interval' and data[key]:
            workflow.interval = data[key]
        if key == 'cron' and data[key]:
            workflow.cron = data[key]
        if key == 'content' and data[key]:
            workflow.update_content(data[key], session=session)
        if key == 'is_paused' and data[key] is not None:
            # Un-pausing a workflow requires a schedule to exist.
            if not data[key] and workflow.cron is None:
                raise AirflowException('cron is not set')
            workflow.is_paused = data[key]
        if key == 'email_alert':
            workflow.email_alert = data[key]
        if key == 'email':
            workflow.email = data[key]
    workflow.update(session=session)
    workflow.sync_dag(session=session)


@provide_session
def mark_dag_failed(dag_id, execution_date: datetime, session: Session = None):
    """Force-fail a dag run and its in-flight task instances.

    RUNNING task instances are moved to SHUTDOWN (killing them), not-yet-run
    ones to UPSTREAM_FAILED; the dag run itself is set to FAILED and the
    workflow's DAG is re-synced.

    :raises AirflowException: when no dag run matches (dag_id, execution_date).
    """
    # CONSISTENCY: pass the active session explicitly (as execute_workflow
    # does) instead of letting provide_session open a second one.
    rs = DagRun.find(dag_id=dag_id, execution_date=execution_date, session=session)
    if not rs:
        raise AirflowException('no running dag found')
    dag_run = rs[0]
    dag_run.set_state(State.FAILED)
    session.merge(dag_run)
    for ti in dag_run.get_task_instances(session=session):
        if ti.state == State.RUNNING:
            ti.set_state_if_kill(State.SHUTDOWN, session=session, is_kill=True)
        elif ti.state is None:
            ti.set_state_if_kill(State.UPSTREAM_FAILED, session=session, is_kill=True)
    # sync dag
    workflow = WorkFlow.get_workflow(workflow_id_from_dag_id(dag_id), session=session)
    workflow.sync_dag(session=session)


@provide_session
def get_by_start_date(project_id, start_time, end_time, session=None):
    """Count task instances per state for a project within a time window.

    :param start_time: window start in epoch milliseconds.
    :param end_time: window end in epoch milliseconds.
    :return: dict mapping state name -> count; the known states below are
        always present (zero when unseen), extra states are added on demand.
    """
    rs = {
        "scheduled": 0,
        "none": 0,
        "queued": 0,
        "running": 0,
        "success": 0,
        "shutdown": 0,
        "failed": 0,
        "up_for_retry": 0,
        "up_for_reschedule": 0,
        "upstream_failed": 0,
        "skipped": 0,
        "sensing": 0
    }
    tis = DagRun.get_by_start_date(project_id, dates.from_milliseconds(start_time), dates.from_milliseconds(end_time),
                                   session)
    for ti in (tis or []):
        if ti.state is None:
            rs["none"] += 1
        else:
            # ROBUSTNESS: tolerate states missing from the preset dict
            # (e.g. states added in newer Airflow versions) instead of
            # raising KeyError.
            rs[ti._state] = rs.get(ti._state, 0) + 1
    return rs


@provide_session
def execute_workflow(workflow_id, execution_date, session=None):
    """Trigger a workflow run and block until it finishes.

    :param workflow_id: numeric workflow id; the dag id is 'dag-<id>'.
    :param execution_date: planned execution time in epoch milliseconds.
    :raises AirflowException: when the date precedes the workflow's start
        time, when max_active_runs_per_dag is already reached, or when the
        run ends in any non-SUCCESS, non-RUNNING state.
    """
    import time
    wk = WorkFlow.get_workflow(workflow_id, session=session)
    if execution_date < dates.to_milliseconds(wk.start_time):
        raise AirflowException("计划时间不能小于工作流开始时间 {}".format(execution_date))
    dag_id = 'dag-{}'.format(workflow_id)
    max_runs = conf.getint('core', 'max_active_runs_per_dag')
    rs = DagRun.find(dag_id=dag_id, state=State.RUNNING, session=session)
    if len(rs) >= max_runs:
        raise AirflowException("超过单个工作流最大并行运行数 {}".format(max_runs))
    execution_date_date = dates.from_milliseconds(execution_date)
    create_dag_run(dag_id, execution_date_date, session=session)
    # Commit so the new run is visible to the scheduler (another process).
    session.commit()
    # Poll every 10s until the run leaves RUNNING. DagRun.find is called
    # WITHOUT the local session here — presumably so each iteration reads
    # fresh DB state rather than this session's cached copy; verify before
    # changing.
    # NOTE(review): busy-waits forever if the run stays RUNNING (no timeout),
    # and rs[0] raises IndexError if the run is deleted mid-poll — confirm
    # whether callers rely on this.
    while True:
        rs = DagRun.find(dag_id=dag_id, execution_date=execution_date_date)
        dag_run = rs[0]
        if dag_run.state == State.SUCCESS:
            break
        elif dag_run.state != State.RUNNING:
            raise AirflowException("workflow execute error state:{}".format(dag_run.state))
        time.sleep(10)


def recursion_clear_history(dag_id, start_date, end_date, session=None):
    """Delete run history of ``dag_id`` within ``[start_date, end_date)``.

    Removes matching DagRun and TaskInstance rows, then recurses into every
    child workflow referenced by a DAGRUN-operator task of this workflow.
    """
    workflow_id = dag_id.split("-")[-1]
    session.query(DagRun).filter(
        DagRun.dag_id == dag_id,
        DagRun.execution_date >= start_date,
        DagRun.execution_date < end_date,
    ).delete()
    session.query(TaskInstance).filter(
        TaskInstance.dag_id == dag_id,
        TaskInstance.execution_date >= start_date,
        TaskInstance.execution_date < end_date,
    ).delete()
    for wft in WorkFlowTask.query_workflow_task(workflow_id, session):
        task = Task.get_task(wft.task_id, session)
        if task.operator != OperatorType.DAGRUN:
            continue
        child_workflow_id = task.param.get("workflow_id", None)
        if child_workflow_id:
            recursion_clear_history('dag-{}'.format(child_workflow_id),
                                    start_date, end_date, session)


@provide_session
def clear_dag_run(dag_id, start_date, end_date, session=None):
    """Clear run history of a dag within ``[start_date, end_date)``.

    Public entry point (supplies a session via @provide_session). The body
    was a line-for-line duplicate of ``recursion_clear_history``, which
    already deletes this dag's DagRun/TaskInstance rows and recurses into
    child workflows launched via DAGRUN-operator tasks — so delegate to it.
    """
    recursion_clear_history(dag_id, start_date, end_date, session)


@provide_session
def clear_history_not_running(date_before, session=None):
    """Purge history rows older than ``date_before`` that are not RUNNING.

    Deletes dag runs, task instances and local task jobs in any non-running
    state, plus all log events, whose execution/start date predates the cutoff.
    """
    session.query(DagRun).filter(
        DagRun.state != State.RUNNING,
        DagRun.execution_date < date_before,
    ).delete()
    session.query(TaskInstance).filter(
        TaskInstance.execution_date < date_before,
        TaskInstance.state != State.RUNNING,
    ).delete()
    session.query(LocalTaskJob).filter(
        LocalTaskJob.start_date < date_before,
        LocalTaskJob.state != State.RUNNING,
    ).delete()
    session.query(Log).filter(Log.execution_date < date_before).delete()


@provide_session
def retry_failed(dag_id, execution_date, session=None):
    """Reset failed task instances of one dag run so they run again.

    First clears the history (±1s around ``execution_date``) of every child
    workflow launched through a DAGRUN-operator task, then sets each FAILED /
    UPSTREAM_FAILED task instance of this run back to no state, and finally
    puts the dag run itself back into RUNNING with a refreshed start date.
    """
    workflow_id = dag_id.split("-")[-1]
    one_second = timedelta(seconds=1)
    for wft in WorkFlowTask.query_workflow_task(workflow_id, session):
        task = Task.get_task(wft.task_id, session)
        if task.operator != OperatorType.DAGRUN:
            continue
        child_workflow_id = task.param.get("workflow_id", None)
        if child_workflow_id:
            recursion_clear_history('dag-{}'.format(child_workflow_id),
                                    execution_date - one_second,
                                    execution_date + one_second, session)
    instances = (
        session.query(TaskInstance)
        .filter(TaskInstance.dag_id == dag_id,
                TaskInstance.execution_date == execution_date)
        .order_by(TaskInstance.start_date.desc())
        .all()
    )
    for ti in instances:
        if ti.state in (State.FAILED, State.UPSTREAM_FAILED):
            ti.set_state_date(state=None, session=session)

    dag_run = DagRun.get_dag_run(session=session, dag_id=dag_id, execution_date=execution_date)
    if dag_run:
        dag_run.set_start_date(start_date=timezone.utcnow() - one_second)
        dag_run.set_state(state=State.RUNNING)


@provide_session
def workflow_instance_count(dag_id, state, session: Session = None):
    """Count dag runs of ``dag_id`` currently in ``state``.

    BUGFIX: the query previously used ``func.count('*')`` with no explicit
    entity — counting a bound string literal and relying on the FROM clause
    being inferred from the WHERE criteria. Counting an explicit column makes
    the FROM target unambiguous and the SQL conventional.
    """
    return (
        session.query(func.count(DagRun.id))
        .filter(DagRun.dag_id == dag_id, DagRun.state == state)
        .scalar()
    )
