from collections import deque
from datetime import timedelta

from sqlalchemy import Column, Integer, String, JSON, Index, Boolean
from sqlalchemy.orm.session import Session

from airflow.patsnap.util.dag_util import dag_id_from_workflow_id
from airflow.exceptions import AirflowException
from airflow.models import DagBag
from airflow.models.base import Base
from airflow.models.dag import DAG, DagModel
from airflow.models.idata_task import Task
from airflow.models.idata_workflow_task import WorkFlowTask
from airflow.models.serialized_dag import SerializedDagModel
from airflow.utils import dates
from airflow.utils import timezone
from airflow.utils.dates import to_milliseconds
from airflow.utils.session import provide_session
from airflow.utils.sqlalchemy import UtcDateTime


def check_circle(nodes: list):
    """Raise ``AirflowException`` if the workflow node graph contains a cycle.

    Runs Kahn's topological sort: repeatedly drains states whose in-degree
    has dropped to zero. If the number of drained states differs from the
    number of workflow nodes, some states could never be freed, i.e. the
    graph has a cycle (or an inconsistent edge set).

    :param nodes: list of node dicts, each carrying a ``stateId`` and an
        optional ``branches`` list of ``{'nextStateId': ...}`` edges.
    """
    successors = dict()
    in_degree = dict()
    total_nodes = 0

    # Build adjacency and in-degree tables in one pass over the nodes.
    for node in nodes:
        total_nodes += 1
        state_id = node['stateId']
        in_degree.setdefault(state_id, 0)
        next_ids = [branch['nextStateId'] for branch in node.get('branches', [])]
        successors[state_id] = next_ids
        for next_id in next_ids:
            in_degree[next_id] = in_degree.get(next_id, 0) + 1

    # Seed the work queue with every state that has no incoming edge.
    ready = deque(state for state, degree in in_degree.items() if degree == 0)

    drained = 0
    while ready:
        current = ready.popleft()
        drained += 1
        for next_id in successors.get(current, []):
            in_degree[next_id] -= 1
            if in_degree[next_id] == 0:
                ready.append(next_id)

    if drained != total_nodes:
        raise AirflowException('Workflow 存在死循环')


class WorkFlow(Base):
    """ORM model for a user-defined workflow.

    A workflow is a DAG of tasks described by the JSON ``content`` graph
    (``{'nodes': [{'stateId', 'type', 'taskId', 'name', 'branches', ...}]}``)
    persisted in the ``idata_workflow`` table, and materialized as an
    Airflow DAG named ``dag-<id>`` via :meth:`create_dag` / :meth:`sync_dag`.
    """

    # Basic attributes
    id = Column(Integer, primary_key=True)
    project_id = Column(Integer)
    folder_id = Column(Integer)
    name = Column(String(128))
    owner = Column(String(128))
    description = Column(String(1024))
    # DAG-run timeout in seconds; capped at one day in create_dag().
    time_out = Column(Integer, default=86400)
    # Workflow graph definition (nodes use camelCase keys: stateId/type/taskId/branches).
    content = Column(JSON)
    parent_id = Column(Integer)
    parent_workflow = Column(String(128))
    parent_project = Column(String(128))
    start_time = Column(UtcDateTime, default=timezone.utcnow)
    end_time = Column(UtcDateTime, default=None)
    update_time = Column(UtcDateTime, default=timezone.utcnow)
    is_active = Column(Boolean, default=True)
    email_alert = Column(Boolean, default=True)
    email = Column(String(255))

    # Scheduling attributes

    is_paused = Column(Boolean, default=True)
    cron = Column(String(128), default='30 2 * * *')
    interval = Column(String(32), default='daily')

    __tablename__ = "idata_workflow"

    __table_args__ = (
        Index('project_name', project_id, name),
        Index('folder_id', folder_id),
        Index('parent_id', parent_id),
    )

    @staticmethod
    def from_dict(data):
        """Build a WorkFlow from a request payload.

        ``start_time``/``end_time`` arrive as epoch milliseconds and are
        converted to datetimes. NOTE: mutates ``data`` in place.
        """
        if 'start_time' in data:
            data['start_time'] = dates.from_milliseconds(data['start_time'])
        if 'end_time' in data:
            data['end_time'] = dates.from_milliseconds(data['end_time'])
        return WorkFlow(**data)

    @staticmethod
    @provide_session
    def get_workflow(workflow_id, session: Session = None):
        """Return the workflow with the given primary key, or None."""
        return session.query(WorkFlow).filter(WorkFlow.id == workflow_id).first()

    @staticmethod
    @provide_session
    def query_workflow(project_id=None, folder_id=None, name=None, session: Session = None):
        """List workflows, optionally filtered by project, folder and a
        substring match on the name. Falsy filter values are ignored.
        """
        qry = session.query(WorkFlow)
        if project_id:
            qry = qry.filter(WorkFlow.project_id == project_id)
        if folder_id:
            qry = qry.filter(WorkFlow.folder_id == folder_id)
        if name:
            qry = qry.filter(WorkFlow.name.like('%' + name + '%'))
        return qry.all()

    @staticmethod
    @provide_session
    def query_task_workflow(task_id, session: Session = None):
        """Return every workflow that references ``task_id`` via the
        ``idata_workflow_task`` join table.
        """
        ans = []
        for _, graph in session.query(WorkFlowTask, WorkFlow).filter(WorkFlowTask.workflow_id == WorkFlow.id,
                                                                     WorkFlowTask.task_id == task_id).all():
            ans.append(graph)
        return ans

    @staticmethod
    @provide_session
    def query_sub_workflow(cid: int, active=1, session: Session = None):
        """Return child workflows of ``cid`` with the given active flag."""
        return session.query(WorkFlow).filter(WorkFlow.parent_id == cid, WorkFlow.is_active == active).all()

    @provide_session
    def insert(self, session: Session = None):
        """Insert this workflow, rejecting a duplicate (project_id, name) pair.

        :raises AirflowException: if a workflow with the same name already
            exists in the project.
        """
        old = session.query(WorkFlow).filter(WorkFlow.project_id == self.project_id, WorkFlow.name == self.name).first()
        if old is not None:
            raise AirflowException('项目中工作流名称重复')
        session.add(self)

    @provide_session
    def sync_dag(self, session=None):
        """Serialize this workflow as an Airflow DAG into the metadata DB
        and apply the workflow's pause state to the DagModel.
        """
        dag = self.create_dag()
        dag_bag = DagBag(read_dags_from_db=True, store_serialized_dags=True)
        dag_bag.bag_dag(dag, dag)
        dag_bag.sync_to_db(session=session)
        DagModel.get_dagmodel('dag-{}'.format(self.id)).set_is_paused(is_paused=self.is_paused, session=session)

    @provide_session
    def update_content(self, content: dict, session: Session = None):
        """Validate and persist a new graph definition.

        Ensures every non-terminal node carries a unique taskId plus a
        stateId and name, verifies the graph is acyclic, then refreshes the
        workflow-task join rows.

        :raises AirflowException: on a missing/duplicate taskId or a cycle.
        """
        assert isinstance(content.get('nodes'), list)
        task_ids = []
        for node in content['nodes']:
            assert node.get('type', None) is not None
            if node['type'] not in ['START', 'END']:
                if not node.get('taskId', None):
                    raise AirflowException('taskId is null ' + str(node))
                if int(node['taskId']) not in task_ids:
                    task_ids.append(int(node['taskId']))
                else:
                    raise AirflowException('同一个工作流中 task 不能重复 {}'.format(node['name']))
                assert node.get('stateId', None) is not None
                assert node.get('name', None) is not None
        check_circle(content['nodes'])
        self.content = content
        session.merge(self)
        WorkFlowTask.update(self.id, task_ids, session=session)

    @provide_session
    def update(self, session: Session = None):
        """Persist attribute changes.

        :raises AirflowException: if the workflow is both a child (dependency
            scheduling) and un-paused (time scheduling) — the two modes are
            mutually exclusive.
        """
        if self.parent_id is not None and not self.is_paused:
            raise AirflowException('时间调度和依赖调度不能同时开启')
        session.merge(self)

    @staticmethod
    @provide_session
    def delete(workflow_id, session=None) -> None:
        """Remove a workflow: its serialized DAG, its task join rows, its
        DagModel activation, and finally the workflow row itself.
        """
        dag_id = 'dag-{}'.format(workflow_id)
        SerializedDagModel.remove_dag(dag_id, session=session)
        WorkFlowTask.update(workflow_id, [], session=session)
        DagModel.deactivate_dag(dag_id)
        session.query(WorkFlow).filter(WorkFlow.id == workflow_id).delete()

    def get_task_ids(self) -> list:
        """Return the task ids of all non-terminal nodes in the graph.

        BUGFIX: the content schema uses camelCase keys ('type', 'taskId')
        everywhere else in this model (update_content, content_error,
        create_dag); the previous snake_case lookups ('node_type',
        'task_id') raised KeyError on any real content.
        """
        return [int(nd['taskId'])
                for nd in self.content['nodes']
                if nd['type'] not in ['START', 'END']]

    def to_json(self, include_content=True) -> dict:
        """Serialize to a JSON-safe dict for the API layer.

        :param include_content: include the graph definition under
            ``'content'`` only when True. BUGFIX: the flag was previously
            ignored — content was emitted unconditionally.
        """
        data = {
            'id': self.id,
            'dag_id': dag_id_from_workflow_id(self.id),
            'project_id': self.project_id,
            'folder_id': self.folder_id,
            'name': self.name,
            'description': self.description,
            'is_paused': self.is_paused,
            'interval': self.interval,
            'cron': self.cron,
            'start_time': to_milliseconds(self.start_time),
            # NOTE(review): end_time may be None — assumes to_milliseconds
            # tolerates None; confirm.
            'end_time': to_milliseconds(self.end_time),
            'parent_project': self.parent_project,
            'parent_id': self.parent_id,
            'email_alert': self.email_alert,
            'email': self.email
        }
        if include_content:
            data['content'] = self.content
        return data

    def content_error(self):
        """Return a human-readable validation error for the graph, or None
        if every non-terminal node references an existing, valid task.
        """
        if not self.content:
            return 'content 不能为空'
        for node in self.content['nodes']:
            if node['type'] not in ['START', 'END']:
                # Falsy covers both missing key and empty string.
                if not node.get('taskId'):
                    return '节点 taskId 不能为空'
                task = Task.get_task(int(node['taskId']))
                if task is None:
                    return 'task 未找到 {}'.format(node['taskId'])
                if task.error_content() is not None:
                    return '{} {}'.format(task.name, task.error_content())

    def create_dag(self, mock=False) -> DAG:
        """Convert this workflow into an Airflow DAG named ``dag-<id>``.

        Builds one operator per non-terminal node, then wires downstream
        edges from each node's branches.

        :param mock: allow building a DAG for an unsaved workflow (no id).
        :raises AirflowException: on a missing id/content or an unknown task.
        """
        if not self.id and not mock:
            raise AirflowException('invalid workflow Id')

        if not self.content:
            raise AirflowException('invalid workflow content')

        # Prefer the explicit alert address; fall back to the owner account.
        # NOTE(review): '@.com' looks like a placeholder missing a real
        # mail domain — confirm against the mail configuration.
        email = None
        if self.email_alert:
            if self.email:
                email = self.email
            elif self.owner:
                email = self.owner + '@.com'

        dag_args = {
            'owner': self.owner,
            'depends_on_past': False,
            'execution_timeout': timedelta(seconds=24 * 3600),
            'email': email
        }

        # Defaults, overridden by per-workflow settings below.
        schedule_interval = '0 0 * * *'
        start_date = dates.sh_now()
        if self.start_time:
            start_date = self.start_time

        if self.cron:
            schedule_interval = self.cron

        # Clamp the run timeout to at most (and default) one day.
        one_day = 3600 * 24
        if self.time_out is None or self.time_out == 0:
            self.time_out = one_day
        elif self.time_out > one_day:
            self.time_out = one_day

        dag = DAG(
            'dag-{}'.format(self.id),
            default_args=dag_args,
            start_date=start_date,
            description=self.description,
            schedule_interval=schedule_interval,
            is_paused_upon_creation=True,
            dagrun_timeout=timedelta(seconds=self.time_out)
        )

        # Pass 1: create one operator per executable node, keyed by stateId.
        task_map = dict()

        for node in self.content['nodes']:
            if node['type'] not in ['START', 'END']:
                assert node.get('taskId', None) is not None
                task = Task.get_task(int(node['taskId']))
                if task is None:
                    raise AirflowException('task id not found {}'.format(node['taskId']))
                task_map[node['stateId']] = task.create_operator(dag)

        # Pass 2: wire edges; branches targeting START/END have no operator
        # in task_map and are skipped.
        for node in self.content['nodes']:
            if node['type'] not in ['START', 'END']:
                stateId = node['stateId']
                if node.get('branches', None):
                    for branch in node['branches']:
                        if branch['nextStateId'] in task_map.keys():
                            task_map[stateId].set_downstream(task_map[branch['nextStateId']])
        return dag
