import logging
from functools import wraps
from typing import Callable, TypeVar, cast

from flask import Blueprint, current_app, request
from flask import jsonify

from airflow.patsnap.service import project_service, datasource_service, env_service
from airflow.exceptions import AirflowException
from airflow.models.idata_env import Env
from airflow.www.api import provide_user

# Module-level logger for this blueprint's request handlers.
log = logging.getLogger(__name__)

T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name


def requires_authentication(function: T):
    """Decorator for handlers that must pass API authentication.

    The check is resolved lazily on every call via
    ``current_app.api_auth.requires_authentication``, so the active
    Flask app's configured auth backend is always the one consulted.
    """

    @wraps(function)
    def wrapper(*args, **kwargs):
        # Look up the auth backend at call time, not at decoration time.
        auth_backend = current_app.api_auth
        protected = auth_backend.requires_authentication(function)
        return protected(*args, **kwargs)

    return cast(T, wrapper)


# Flask blueprint grouping the environment CRUD endpoints defined below.
api_speech_env = Blueprint('api_speech_env', __name__)


# user route
@api_speech_env.route('/envs/<int:id>', methods=['GET'])
def get_env(id):
    """Fetch one environment by primary key and return it as JSON.

    NOTE: the parameter shadows the ``id`` builtin, but renaming it would
    break ``url_for(..., id=...)`` callers, so it is kept as-is.
    Responds ``{code: 0, data: <env json>}`` on success, ``code: 500``
    with the error message when the lookup raises ``AirflowException``.
    """
    try:
        record = Env.get_env(id)
        payload = record.to_json()
    except AirflowException as e:
        log.error("get_env error =%s, params=%s", e, id, )
        return jsonify(code=500, message=str(e), data=None)
    return jsonify(code=0, message='ok', data=payload)


@api_speech_env.route('/envs', methods=['GET'])
def get_envs():
    """List environments, paginated and optionally filtered by type.

    Query params:
        page: zero-based page index, defaults to 0.
        type: optional environment-type filter; ``None`` when absent.

    Returns a JSON envelope ``{code, message, data}``: ``code`` 0 on
    success, 500 when the service layer raises ``AirflowException``.
    """
    try:
        page = request.values.get('page', 0, type=int)
        env_type = request.values.get("type", type=str, default=None)
        result = env_service.get_envs(page, env_type)
        return jsonify(code=0, message='ok', data=result)
    except AirflowException as e:
        # Fix: the log line previously said "get_params" (copy-paste error).
        log.error("get_envs error =%s", e)
        return jsonify(code=500, message=str(e), data=None)


@api_speech_env.route('/envs', methods=['POST'])
@provide_user
def add_env(oa_user_name):
    """Create a new environment from request parameters.

    ``name`` is required (400 when missing). The ``*_path`` fields and
    ``docker_image`` default to '' while ``binary_path`` defaults to None,
    mirroring what the service layer expects. ``oa_user_name`` is injected
    by ``@provide_user`` and is currently unused here.

    Returns a JSON envelope ``{code, message, data}``: 0 on success,
    400 on missing name, 500 on ``AirflowException``.
    """
    try:
        name = request.values.get("name", type=str, default=None)
        env_type = request.values.get("type", type=int, default=None)
        # Optional artifact locations; empty string means "not configured".
        etl_path = request.values.get("etl_path", type=str, default='')
        common_jar_path = request.values.get("common_jar_path", type=str, default='')
        kafka_jar_path = request.values.get("kafka_jar_path", type=str, default='')
        temporary_file_path = request.values.get("temporary_file_path", type=str, default='')
        jdbc_jar_path = request.values.get("jdbc_jar_path", type=str, default='')
        mongodb_jar_path = request.values.get("mongodb_jar_path", type=str, default='')
        checkpoint_path = request.values.get("checkpoint_path", type=str, default='')
        decoder_path = request.values.get("decoder_path", type=str, default='')
        hive_jar_path = request.values.get("hive_jar_path", type=str, default='')
        es_jar_path = request.values.get("es_jar_path", type=str, default='')
        binary_path = request.values.get("binary_path", type=str, default=None)
        docker_image = request.values.get("docker_image", type=str, default='')

        if name is None:
            # Fix: format previously rendered stray braces ("name={%s}").
            log.error("add_env params error name=%s", name)
            # Fix: message previously said "add_projects ... has Node"
            # (copy-pasted from another endpoint, "Node" for "None").
            return jsonify(code=400, message="add_env params has None,", data=None)
        env_service.add_env(name, env_type, etl_path, common_jar_path, kafka_jar_path,
                            temporary_file_path, jdbc_jar_path, mongodb_jar_path,
                            checkpoint_path, decoder_path, hive_jar_path, es_jar_path,
                            binary_path, docker_image)
        return jsonify(code=0, message='ok', data=None)
    except AirflowException as e:
        log.error("add_env error =%s", e)
        return jsonify(code=500, message=str(e), data=None)


@api_speech_env.route('/envs/<int:id>', methods=['DELETE'])
def delete_env(id):
    """Delete the environment identified by *id*.

    Responds ``{code: 0}`` on success, ``code: 500`` with the error
    message when deletion raises ``AirflowException``. (The parameter
    shadows the ``id`` builtin; kept for ``url_for`` compatibility.)
    """
    try:
        Env.delete_env(id)
    except AirflowException as e:
        log.error("delete_env error =%s", e)
        return jsonify(code=500, message=str(e), data=None)
    return jsonify(code=0, message='ok', data=None)


@api_speech_env.route('/envs', methods=['PUT'])
@provide_user
def update_env(oa_user_name):
    """Update an existing environment from request parameters.

    ``id``, ``name`` and ``type`` are required (400 when any is missing);
    the ``*_path`` fields and ``docker_image`` default to '' and
    ``binary_path`` to None, matching ``add_env``. ``oa_user_name`` is
    injected by ``@provide_user`` and is currently unused here.

    Returns a JSON envelope ``{code, message, data}``: 0 on success,
    400 on missing required params, 500 on ``AirflowException``.
    """
    try:
        env_id = request.values.get("id", type=int, default=None)
        name = request.values.get("name", type=str, default=None)
        env_type = request.values.get("type", type=int, default=None)
        # Optional artifact locations; empty string means "not configured".
        etl_path = request.values.get("etl_path", type=str, default='')
        common_jar_path = request.values.get("common_jar_path", type=str, default='')
        kafka_jar_path = request.values.get("kafka_jar_path", type=str, default='')
        temporary_file_path = request.values.get("temporary_file_path", type=str, default='')
        jdbc_jar_path = request.values.get("jdbc_jar_path", type=str, default='')
        mongodb_jar_path = request.values.get("mongodb_jar_path", type=str, default='')
        checkpoint_path = request.values.get("checkpoint_path", type=str, default='')
        decoder_path = request.values.get("decoder_path", type=str, default='')
        hive_jar_path = request.values.get("hive_jar_path", type=str, default='')
        es_jar_path = request.values.get("es_jar_path", type=str, default='')
        binary_path = request.values.get("binary_path", type=str, default=None)
        docker_image = request.values.get("docker_image", type=str, default='')

        if env_id is None or name is None or env_type is None:
            # Fix: format previously rendered stray braces ("env_id={%s}" etc.).
            log.error(
                "update_env params error env_id=%s,name=%s,env_type=%s",
                env_id, name, env_type)
            # Fix: message previously said "update_project ... has Node"
            # (copy-pasted, "Node" for "None").
            return jsonify(code=400, message="update_env params has None,", data=None)
        env_service.update_env(env_id, name, env_type, etl_path, common_jar_path, kafka_jar_path,
                               temporary_file_path, jdbc_jar_path, mongodb_jar_path,
                               checkpoint_path, decoder_path, hive_jar_path, es_jar_path,
                               binary_path, docker_image)
        return jsonify(code=0, message='ok', data=None)
    except AirflowException as e:
        log.error("update_env error =%s", e)
        return jsonify(code=500, message=str(e), data=None)
