import random
from datetime import datetime, timedelta

from sqlalchemy.orm.session import Session

from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.models import TaskInstance
from airflow.models.dagpickle import DagPickle
from airflow.models.dagrun import DagRun
from airflow.utils import dates
from airflow.utils import timezone
from airflow.utils.session import provide_session
from airflow.utils.state import State
from airflow.utils.types import DagRunType

# Cron templates keyed by schedule interval; the '{}' placeholder is filled
# with a minute value so not every scheduled DAG fires at minute 0.
cron_sets = {
    'hourly': '{} * * * *',
    'daily': '{} 0 * * *',
    'weekly': '{} 0 * * 0',
    'monthly': '{} 0 1 * *',
    'quarterly': '{} 0 1 */3 *',
    'yearly': '{} 0 1 1 *',
}

# Candidate minute offsets (5-minute buckets) for spreading schedules.
minutes = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50]

# (key, display label) pairs for UI selection of a schedule interval.
scheduler_interval = [
    ('hourly', '每小时'),
    ('daily', '每天'),
    ('weekly', '每周'),
    ('monthly', '每月'),
    ('quarterly', '每季度'),
    ('yearly', '每年')
]


def get_default_cron(key):
    """Return a cron expression for *key* with a random minute offset.

    Returns None when *key* is not a known interval.
    """
    template = cron_sets.get(key)
    if template is None:
        return None
    return template.format(random.choice(minutes))


@provide_session
def create_dag_run(dag_id, execution_date: datetime, session: Session = None, run_type=DagRunType.MANUAL):
    """Create a RUNNING, externally-triggered DagRun from the pickled DAG.

    :param dag_id: id of the DAG whose pickle is looked up
    :param execution_date: logical execution date of the new run
    :param session: SQLAlchemy session injected by @provide_session
    :param run_type: type of the created run (manual by default)
    :raises AirflowException: when no pickle exists for *dag_id*
    """
    pickled = DagPickle.get_by_dag(dag_id, session=session)
    if pickled is None:
        raise AirflowException('dag_pickle not found')

    pickled.pickle.create_dagrun(
        run_type=run_type,
        execution_date=execution_date,
        start_date=timezone.utcnow(),
        state=State.RUNNING,
        external_trigger=True,
        session=session,
        dag_hash=pickled.pickle_hash,
        creating_job_id=0,
    )


@provide_session
def workflow_history(dag_id, session: Session = None, size=20):
    """Return the most recent *size* runs of a DAG as plain dicts, newest first.

    Dates are converted to epoch milliseconds for JSON-friendly output.
    """
    recent_runs = (
        session.query(DagRun)
        .filter(DagRun.dag_id == dag_id)
        .order_by(DagRun.execution_date.desc())
        .limit(size)
        .all()
    )
    history = []
    for run in recent_runs:
        history.append({
            'dag_id': run.dag_id,
            'execution_date': dates.to_milliseconds(run.execution_date),
            'state': run.state,
            'start_date': dates.to_milliseconds(run.start_date),
            'end_date': dates.to_milliseconds(run.end_date),
            'external_trigger': run.external_trigger,
        })
    return history


@provide_session
def mark_dag_failed(dag_id, execution_date: datetime, session: Session = None):
    """Force a DagRun to FAILED and reconcile its task instances.

    RUNNING tasks are moved to SHUTDOWN so workers stop them; tasks that
    never received a state are marked UPSTREAM_FAILED.

    :param dag_id: id of the DAG whose run should be failed
    :param execution_date: execution date identifying the run
    :param session: SQLAlchemy session injected by @provide_session
    :raises AirflowException: when no DagRun matches (dag_id, execution_date)
    """
    # Pass the injected session so the lookup runs in the same transaction
    # as the updates below (consistent with the other calls in this module).
    rs = DagRun.find(dag_id=dag_id, execution_date=execution_date, session=session)
    if not rs:
        raise AirflowException('no running dag found')
    dag_run = rs[0]
    dag_run.set_state(State.FAILED)
    session.merge(dag_run)
    for ti in dag_run.get_task_instances(session=session):
        if ti.state == State.RUNNING:
            ti.set_state(State.SHUTDOWN, session=session)
        elif ti.state is None:
            ti.set_state(State.UPSTREAM_FAILED, session=session)


@provide_session
def execute_workflow(workflow_id, execution_date, session=None):
    """Trigger the DAG backing *workflow_id* and block until the run finishes.

    :param workflow_id: workflow identifier; mapped to DAG id 'dag-<workflow_id>'
    :param execution_date: execution date in epoch milliseconds
    :param session: SQLAlchemy session injected by @provide_session
    :raises AirflowException: when the per-DAG active-run limit is already
        reached, or the run ends in any state other than SUCCESS/RUNNING
    """
    import time
    dag_id = 'dag-{}'.format(workflow_id)
    max_runs = conf.getint('core', 'max_active_runs_per_dag')
    rs = DagRun.find(dag_id=dag_id, state=State.RUNNING, session=session)
    if len(rs) >= max_runs:
        # Message: "exceeds the maximum number of parallel runs for one workflow"
        raise AirflowException("超过单个工作流最大并行运行数 {}".format(max_runs))
    execution_date_date = dates.from_milliseconds(execution_date)
    # Wipe any leftover run/task rows in a +/-1s window around the target
    # execution date so the new run starts from a clean slate.
    recursion_clear_history(dag_id, execution_date_date - timedelta(seconds=1),
                            execution_date_date + timedelta(seconds=1))
    create_dag_run(dag_id, execution_date_date, session=session)
    session.commit()  # make the new DagRun visible to other sessions before polling
    while True:
        # NOTE(review): session= is deliberately(?) omitted here, so each poll
        # uses a fresh @provide_session session — presumably to avoid stale
        # identity-map reads of the run's state; confirm before changing.
        rs = DagRun.find(dag_id=dag_id, execution_date=execution_date_date)
        dag_run = rs[0]
        if dag_run.state == State.SUCCESS:
            break
        elif dag_run.state != State.RUNNING:
            raise AirflowException("workflow execute error state:{}".format(dag_run.state))
        time.sleep(10)


@provide_session
def recursion_clear_history(dag_id, start_date, end_date, session=None):
    """Bulk-delete DagRun and TaskInstance rows of *dag_id* whose
    execution_date falls in [start_date, end_date)."""
    run_criteria = (
        DagRun.dag_id == dag_id,
        DagRun.execution_date >= start_date,
        DagRun.execution_date < end_date,
    )
    ti_criteria = (
        TaskInstance.dag_id == dag_id,
        TaskInstance.execution_date >= start_date,
        TaskInstance.execution_date < end_date,
    )
    session.query(DagRun).filter(*run_criteria).delete()
    session.query(TaskInstance).filter(*ti_criteria).delete()