import logging

from flask import Blueprint
from flask import jsonify
from flask import request
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.session import Session

from airflow.patsnap.service import workflow_service, backfill_service
from airflow.configuration import conf
from airflow.models.dagrun import DagRun
from airflow.models.idata_workflow import WorkFlow
from airflow.utils import dates
from airflow.utils.session import provide_session
from airflow.utils.state import State
from airflow.www.api import exception_handle

# Module-level logger for this blueprint's handlers.
log = logging.getLogger(__name__)

# Blueprint grouping the /workflows REST endpoints defined below.
workflow_bp = Blueprint('workflow_bp', __name__)


@workflow_bp.route('/workflows/<int:workflow_id>', methods=['GET'])
@exception_handle
def get_workflow(workflow_id):
    """Return a single workflow by id, or a code-404 payload when it does not exist."""
    workflow = WorkFlow.get_workflow(workflow_id)
    if workflow:
        return jsonify(code=0, message='ok', data=workflow.to_json())
    # Bug fix: this endpoint serves workflows, but the old message said
    # 'task not found', which is misleading to API consumers.
    return jsonify(code=404, message='workflow not found', data=None)


@workflow_bp.route('/workflows', methods=['GET'])
@exception_handle
def query_workflow():
    """List workflows filtered by project_id / folder_id and an optional name."""
    project_id = request.args.get('project_id', None)
    folder_id = request.args.get('folder_id', None)
    # At least one scoping parameter is required.
    if project_id is None and folder_id is None:
        return jsonify(code=1, message='project_id or folder_id 不能为空', data=None)
    name = request.args.get('name', None)
    matches = WorkFlow.query_workflow(project_id=project_id, folder_id=folder_id, name=name)
    payload = [item.to_json() for item in matches]
    return jsonify(code=0, message='ok', data=payload)


@workflow_bp.route('/workflows/<int:workflow_id>/history', methods=['GET'])
@exception_handle
def workflow_history(workflow_id):
    """Return the run history of the DAG backing this workflow."""
    history = workflow_service.workflow_history(f'dag-{workflow_id}')
    return jsonify(code=0, message='ok', data=history)


@workflow_bp.route('/workflows/scheduler_interval', methods=['GET'])
@exception_handle
def get_scheduler_interval():
    """Expose the scheduler interval held by workflow_service."""
    interval = workflow_service.scheduler_interval
    return jsonify(code=0, message='ok', data=interval)


@workflow_bp.route('/workflows/default_cron', methods=['GET'])
@exception_handle
def get_default_cron():
    """Translate an interval query parameter into its default cron expression."""
    interval = request.args.get('interval', None)
    # Guard clause: reject requests that omit the interval parameter.
    if not interval:
        return jsonify(code=400, message='bad request interval not found', data=None)
    cron = workflow_service.get_default_cron(interval)
    return jsonify(code=0, message='ok', data=cron)


@workflow_bp.route('/workflows', methods=['POST'])
@exception_handle
def insert():
    """Create a new workflow from the JSON request body.

    Returns the stored workflow on success, or code 500 with the database
    error text when a uniqueness/constraint violation occurs.
    """
    data = request.get_json()
    workflow = WorkFlow.from_dict(data)
    try:
        workflow.insert()
    except IntegrityError as err:
        # Fix: log the constraint violation server-side; previously the only
        # trace of the failure was the raw error text echoed to the client.
        log.exception('failed to insert workflow: %s', err)
        return jsonify(code=500, message=str(err), data=None)
    return jsonify(code=0, message='ok', data=workflow.to_json())


@workflow_bp.route('/workflows/<int:workflow_id>', methods=['PUT'])
@exception_handle
def update(workflow_id):
    """Update an existing workflow; the URL id is used only when the body omits one."""
    data = request.get_json()
    # Idiom fix: `if not 'id' in data: data['id'] = ...` replaced with
    # setdefault -- same "fill only when missing" semantics, no awkward negation.
    data.setdefault('id', workflow_id)
    workflow = WorkFlow.from_dict(data)
    workflow.update()
    return jsonify(code=0, message='ok', data=workflow.to_json())


@workflow_bp.route('/workflows/<int:workflow_id>', methods=['DELETE'])
@exception_handle
def delete(workflow_id):
    """Remove the workflow identified by the URL id; always answers ok."""
    WorkFlow.delete(workflow_id)
    return jsonify(code=0, message='ok', data=None)


@workflow_bp.route('/workflows/backfill/trigger', methods=['POST'])
@exception_handle
def back_fill_trigger():
    """Launch a backfill pod for a workflow over [start_date, end_date] (ms timestamps)."""
    body = request.get_json()
    dag_graph_id = body.get('dag_graph_id', None)
    start_ms = body.get('start_date', None)
    end_ms = body.get('end_date', None)
    synchronous = body.get('is_synchronous', False)
    # All three identifiers are mandatory.
    if dag_graph_id is None or start_ms is None or end_ms is None:
        return jsonify(code=400, message='dag_graph_id or start_date or end_date is None', data=None)

    start = dates.from_milliseconds(start_ms)
    end = dates.from_milliseconds(end_ms)
    backfill_service.run_in_pod(f'dag-{dag_graph_id}', start, end, synchronous)
    return jsonify(code=0, message='ok', data=None)


@workflow_bp.route('/workflows/backfill/<int:job_id>/kill', methods=['POST'])
@exception_handle
def back_kill(job_id):
    """Terminate the backfill pod belonging to the given job id."""
    backfill_service.kill_pod(job_id)
    return jsonify(code=0, message='ok', data=None)


@workflow_bp.route('/workflows/backfill/<int:workflow_id>', methods=['GET'])
@exception_handle
def back_query(workflow_id):
    """List backfill jobs recorded for this workflow's DAG."""
    jobs = backfill_service.query_job(f'dag-{workflow_id}')
    return jsonify(code=0, message='ok', data=jobs)


@workflow_bp.route('/workflows/trigger', methods=['POST'])
@exception_handle
@provide_session
def workflow_trigger(session: Session = None):
    """Manually trigger one DAG run, honouring max_active_runs_per_dag."""
    body = request.get_json()
    dag_graph_id = body.get('dag_graph_id', None)
    execution_date = body.get('execution_date', None)

    if dag_graph_id is None or execution_date is None:
        return jsonify(code=400, message='dag_graph_id or execution_date is None', data=None)

    dag_id = f'dag-{dag_graph_id}'

    # Refuse the trigger once the DAG already has the configured maximum
    # number of concurrently RUNNING DagRuns.
    limit = conf.getint('core', 'max_active_runs_per_dag')
    running = DagRun.find(dag_id=dag_id, state=State.RUNNING, session=session)
    if len(running) >= limit:
        return jsonify(code=1, message='max_active_runs_per_dag exceed {}'.format(limit), data=None)

    workflow_service.create_dag_run(dag_id, dates.from_milliseconds(execution_date), session=session)
    return jsonify(code=0, message='ok', data=None)


@workflow_bp.route('/workflows/kill', methods=['POST'])
@exception_handle
def workflow_kill():
    """Mark one DAG execution as failed, effectively killing the workflow run."""
    body = request.get_json()
    dag_graph_id = body.get('dag_graph_id', None)
    execution_date = body.get('execution_date', None)
    if dag_graph_id is None or execution_date is None:
        return jsonify(code=400, message='dag_graph_id or execution_date is None', data=None)

    when = dates.from_milliseconds(execution_date)
    workflow_service.mark_dag_failed(f'dag-{dag_graph_id}', when)
    return jsonify(code=0, message='ok', data=None)
