from datetime import timedelta

from sqlalchemy import Column, Integer, String, JSON, Index, Boolean
from sqlalchemy.orm.session import Session

from airflow.exceptions import AirflowException
from airflow.models import DagBag
from airflow.models.base import Base
from airflow.models.dag import DAG, DagModel
from airflow.models.idata_task import Task
from airflow.models.idata_workflow_task import WorkFlowTask
from airflow.models.serialized_dag import SerializedDagModel
from airflow.utils import dates
from airflow.utils import timezone
from airflow.utils.dates import to_milliseconds
from airflow.utils.session import provide_session
from airflow.utils.sqlalchemy import UtcDateTime


class WorkFlow(Base):
    """Workflow definition persisted in the ``idata_workflow`` table.

    A workflow is a node/edge graph stored in ``content`` that is converted
    into an Airflow :class:`DAG` named ``dag-<id>`` by :meth:`create_dag`.
    ``insert``/``update`` keep the serialized-DAG tables in sync via
    :class:`DagBag`, and ``delete`` removes the serialized DAG again.
    """

    # Basic attributes
    id = Column(Integer, primary_key=True)
    project_id = Column(Integer)
    folder_id = Column(Integer)
    name = Column(String(128))
    owner = Column(String(128))
    description = Column(String(1024))
    time_out = Column(Integer)  # DAG-run timeout in seconds; clamped to one day in create_dag()
    content = Column(JSON)  # graph payload: {'nodes': [...], 'edges': [...]}
    parent_id = Column(Integer)  # id of the parent workflow for sub-workflows
    start_time = Column(UtcDateTime, default=timezone.utcnow)
    end_time = Column(UtcDateTime, default=timezone.utcnow)
    update_time = Column(UtcDateTime, default=timezone.utcnow)
    is_active = Column(Boolean, default=True)
    email_alert = Column(Boolean, default=True)  # when True, failure emails go to `email` or `<owner>@4.com`
    email = Column(String(255))

    # Scheduling attributes

    is_paused = Column(Boolean, default=True)
    cron = Column(String(128))  # cron expression used as the DAG schedule_interval
    interval = Column(String(32))

    __tablename__ = "idata_workflow"

    __table_args__ = (
        Index('project_name', project_id, name),
        Index('folder_id', folder_id),
        Index('parent_id', parent_id),
    )

    @staticmethod
    def from_dict(data):
        """Build a :class:`WorkFlow` from a plain dict.

        ``start_time``/``end_time``, when present, are given as epoch
        milliseconds and converted to datetimes. NOTE: mutates ``data``
        in place before passing it to the constructor.
        """
        if 'start_time' in data:
            data['start_time'] = dates.from_milliseconds(data['start_time'])
        if 'end_time' in data:
            data['end_time'] = dates.from_milliseconds(data['end_time'])
        return WorkFlow(**data)

    @staticmethod
    @provide_session
    def get_workflow(graph_id, session: Session = None):
        """Return the workflow with primary key ``graph_id``, or None."""
        return session.query(WorkFlow).filter(WorkFlow.id == graph_id).first()

    @staticmethod
    @provide_session
    def query_workflow(project_id=None, folder_id=None, name=None, session: Session = None):
        """List workflows, optionally filtered by project, folder and fuzzy name.

        Each filter is applied only when its argument is truthy, so passing
        0 (or '') skips that filter. ``name`` is matched with SQL LIKE
        (substring match).
        """
        qry = session.query(WorkFlow)
        if project_id:
            qry = qry.filter(WorkFlow.project_id == project_id)
        if folder_id:
            qry = qry.filter(WorkFlow.folder_id == folder_id)
        if name:
            qry = qry.filter(WorkFlow.name.like('%' + name + '%'))
        return qry.all()

    @staticmethod
    @provide_session
    def query_task_workflow(task_id, session: Session = None):
        """Return all workflows that reference ``task_id`` via WorkFlowTask."""
        ans = []
        for _, graph in session.query(WorkFlowTask, WorkFlow).filter(WorkFlowTask.workflow_id == WorkFlow.id,
                                                                     WorkFlowTask.task_id == task_id).all():
            ans.append(graph)
        return ans

    @staticmethod
    @provide_session
    def query_sub_workflow(cid: int, active=1, session: Session = None):
        """Return child workflows of ``cid`` filtered by ``is_active``.

        NOTE(review): ``active`` defaults to the int 1 and is compared
        against a Boolean column — relies on the backend's int/bool
        coercion; verify intended.
        """
        return session.query(WorkFlow).filter(WorkFlow.parent_id == cid, WorkFlow.is_active == active).all()

    @provide_session
    def insert(self, session: Session = None):
        """Persist a new workflow and register its DAG in the serialized-DAG store.

        Validates the graph first via a mock ``create_dag`` call, flushes to
        obtain ``self.id``, syncs the task mapping, then bags and serializes
        the real DAG.

        :raises AirflowException: if scheduling is enabled on what the guard
            treats as a sub-workflow.
        """
        # NOTE(review): the guard keys off project_id rather than parent_id
        # to detect a "sub workflow" — confirm this is the intended marker.
        if self.is_paused is not None and self.project_id and not self.is_paused:
            raise AirflowException('sub workflow can not enable scheduler')
        self.create_dag(mock=True)  # validate graph/tasks before touching the DB
        session.add(self)
        session.flush()  # assigns self.id, needed by WorkFlowTask.update and the DAG id
        WorkFlowTask.update(self.id, self.get_task_ids(), session=session)
        dag = self.create_dag()
        dag_bag = DagBag(read_dags_from_db=True, store_serialized_dags=True)
        # The DAG is passed as its own root (it has no parent DAG).
        dag_bag.bag_dag(dag, dag)
        dag_bag.sync_to_db(session=session)

    @provide_session
    def update(self, session: Session = None):
        """Update an existing workflow and re-serialize its DAG.

        Mirrors :meth:`insert` (validate, merge, sync tasks, re-bag the DAG)
        and additionally pushes ``is_paused`` onto the Airflow ``DagModel``
        so the scheduler picks up pause/unpause changes.

        :raises AirflowException: if scheduling is enabled on what the guard
            treats as a sub-workflow.
        """
        assert self.id is not None
        # NOTE(review): same project_id-based sub-workflow guard as insert().
        if self.is_paused is not None and self.project_id and not self.is_paused:
            raise AirflowException('sub workflow can not enable scheduler')
        self.create_dag(mock=True)  # validate before mutating DB state
        session.merge(self)
        WorkFlowTask.update(self.id, self.get_task_ids(), session=session)
        dag = self.create_dag()
        dag_bag = DagBag(read_dags_from_db=True, store_serialized_dags=True)
        dag_bag.bag_dag(dag, dag)
        dag_bag.sync_to_db(session=session)
        dag_model = session.query(DagModel).filter(DagModel.dag_id == 'dag-{}'.format(self.id)).first()
        if dag_model:
            dag_model.set_is_paused(self.is_paused, including_subdags=False, session=session)

    @staticmethod
    @provide_session
    def delete(workflow_id, session=None) -> None:
        """Delete a workflow and tear down its Airflow artifacts.

        Removes the serialized DAG, clears the task mapping, deactivates the
        DagModel row and finally deletes the workflow row itself.
        """
        dag_id = 'dag-{}'.format(workflow_id)
        SerializedDagModel.remove_dag(dag_id, session=session)
        WorkFlowTask.update(workflow_id, [], session=session)
        # NOTE(review): deactivate_dag is not passed the current session —
        # it presumably opens its own; verify transactional consistency.
        DagModel.deactivate_dag(dag_id)
        session.query(WorkFlow).filter(WorkFlow.id == workflow_id).delete()

    def get_task_ids(self) -> list:
        """Return the task ids of all graph nodes except the START/END markers."""
        rs = []
        for nd in self.content['nodes']:
            if nd['node_type'] not in ['START', 'END']:
                rs.append(int(nd['task_id']))
        return rs

    def to_json(self, include_content=True) -> dict:
        """Serialize the workflow to a JSON-friendly dict.

        Timestamps are emitted as epoch milliseconds; the (potentially large)
        ``content`` graph is included only when ``include_content`` is True.
        """
        data = {
            'id': self.id,
            'project_id': self.project_id,
            'folder_id': self.folder_id,
            'name': self.name,
            'description': self.description,
            'is_active': self.is_active,
            'is_paused': self.is_paused,
            'cron': self.cron,
            'start_time': to_milliseconds(self.start_time),
            'update_time': to_milliseconds(self.update_time),
            'email_alert': self.email_alert,
            'email': self.email
        }
        if include_content:
            data['content'] = self.content
        return data

    def create_dag(self, mock=False) -> DAG:
        """Convert this workflow's node/edge graph into an Airflow :class:`DAG`.

        The DAG is named ``dag-<id>``; each non-START/END node becomes an
        operator via ``Task.create_operator`` and edges become upstream/
        downstream dependencies.

        :param mock: when True, skip the requirement that ``self.id`` is set
            (used to validate the graph before the row is flushed).
        :raises AirflowException: if the workflow has no id (and not mock),
            or if a node references a task id that does not exist.
        """

        if not self.id and not mock:
            raise AirflowException('invalid workflow Id')

        # Alert email: explicit address wins, otherwise derive it from the
        # owner account (company mail domain, presumably — verify).
        email = None
        if self.email_alert:
            if self.email:
                email = self.email
            elif self.owner:
                email = self.owner + '@4.com'

        dag_args = {
            'owner': self.owner,
            'depends_on_past': False,
            'execution_timeout': timedelta(seconds=24 * 3600),  # per-task timeout: 24h
            'email': email
        }

        # Defaults: daily at midnight, starting now (Shanghai time helper).
        schedule_interval = '0 0 * * *'
        start_date = dates.sh_now()
        if self.start_time:
            start_date = self.start_time

        if self.cron:
            schedule_interval = self.cron

        # Clamp the DAG-run timeout to [1 second, 1 day]; unset/0 means 1 day.
        # NOTE(review): this mutates self.time_out, so the clamped value can
        # be written back to the DB by insert()/update() — confirm intended.
        one_day = 3600 * 24
        if self.time_out is None or self.time_out == 0:
            self.time_out = one_day
        elif self.time_out > one_day:
            self.time_out = one_day

        dag = DAG(
            'dag-{}'.format(self.id),
            default_args=dag_args,
            start_date=start_date,
            description=self.description,
            schedule_interval=schedule_interval,
            is_paused_upon_creation=True,
            dagrun_timeout=timedelta(seconds=self.time_out)
        )

        # Map graph-node id -> operator for the non-marker nodes.
        data = dict()

        for node in self.content['nodes']:
            if node['node_type'] not in ['START', 'END']:
                assert node.get('task_id', None) is not None
                task = Task.get_task(int(node['task_id']))
                if task is None:
                    raise AirflowException('task id not found {}'.format(node['task_id']))
                data[node['id']] = task.create_operator(dag)

        # Wire dependencies; edges touching START/END markers are skipped
        # because those node ids were never added to `data`.
        for edge in self.content['edges']:
            if edge['source'] in data and edge['target'] in data:
                source = data[edge['source']]
                target = data[edge['target']]
                source.set_downstream(target)

        return dag
