import datetime

from jinja2 import Environment, FileSystemLoader

from app.bigdata.handlers.impala_handler import get_impala_queries_duration_millis_top, get_impala_queries_memory_aggregate, \
    get_impala_queries_memory_estimated, get_impala_queries_thread_cpu_time
from app.bigdata.handlers.impala_compute_stats_handler import compute_stats_handler
from app.bigdata.handlers.cm_monitor_handler import ClouderaMonitorHandler, check_service_status
from app.bigdata.handlers.yarn_handler import get_yarn_job_duration_top, get_yarn_job_cpu_top
from app.bigdata.handlers.kudu_handler import KuduInfo, kudu_capacity_report_handler
from app.bigdata.handlers.hive_handler import HiveHandler, hive_capacity_report_handler
from app.bigdata.handlers.impala_handler import impala_queries_report_handler
from app.bigdata.handlers.yarn_handler import yarn_jobs_report_handler
from app.bigdata.handlers.bdl_disk_capacity_handler import bdl_disk_capacity_handler, disk_hdfs_capacity_report_handler, \
    disk_kudu_capacity_report_handler
from app.bigdata.models.hive_table_info import HiveTableInfo
from app.bigdata.models.kudu_table_info import KuduTableInfo
from app.bigdata.models.impala_query_info import ImpalaQueryInfo
from app.bigdata.models.bdl_disk_capacity_info import BdlDiskCapacityInfo
from app.bigdata.models.impala_query_compute_stat import ImpalaQueryComputeStat
from app.bigdata.models.yarn_job_info import YarnJobInfo
from app.bigdata.utils import time_interval_ranges
from common.database import SessionLocal
from common.notice_tools import NoticeTools
from common.message import Message
from core.celery import celery_app, TaskMonitor
from core.logger import logger

from setting import config


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_hive_export_data_async(self):
    """Export Hive table metadata into the database.

    Fetches table info via HiveHandler and persists every entry with
    HiveTableInfo.add. Errors are logged; the session is always closed.
    """
    db = SessionLocal()
    try:
        for table_info in HiveHandler().get_tables_info():
            HiveTableInfo.add(db, table_info)
    except Exception as exc:
        logger.error(exc)
    finally:
        db.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_kudu_export_data_async(self):
    """Export Kudu table metadata into the database.

    Fetches table info via KuduInfo and persists every entry with
    KuduTableInfo.add. Errors are logged; the session is always closed.
    """
    db = SessionLocal()
    try:
        for table_info in KuduInfo().get_tables_info():
            KuduTableInfo.add(db, table_info)
    except Exception as exc:
        logger.error(exc)
    finally:
        db.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_bdl_disk_capacity_info_async(self):
    """Collect BDL disk capacity figures and store them in the database.

    Each record produced by bdl_disk_capacity_handler is persisted via
    BdlDiskCapacityInfo.add. Errors are logged; the session is always closed.
    """
    db = SessionLocal()
    try:
        for capacity_record in bdl_disk_capacity_handler():
            BdlDiskCapacityInfo.add(db, capacity_record)
    except Exception as exc:
        logger.error(exc)
    finally:
        db.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_impala_export_data_async(self, past_day=1):
    """Export Impala query records for a past day into the database.

    Walks the window [``past_day`` days ago 00:00, next day 08:00] in
    60-minute slices, skips internal ``upsert into pg_exdata`` statements,
    and upserts each query keyed by ``query_id``.

    :param past_day: how many days back the export window starts (default 1).
    """
    session = SessionLocal()
    import re
    # Queries matching this pattern are internal export statements; skip them.
    rc = re.compile(r"^upsert\sinto\spg_exdata.*$")
    try:
        from datetime import datetime, timedelta
        past_datatime = (datetime.now() - timedelta(days=past_day))
        begin_time = past_datatime.strftime("%Y-%m-%d 00:00:00")
        end_time = (past_datatime + timedelta(days=1)).strftime("%Y-%m-%d 08:00:00")
        datetime_ranges = time_interval_ranges(begin_time, end_time, 60)
        for datetime_range in datetime_ranges:
            items = impala_queries_report_handler(datetime_range.get("begin_time"), datetime_range.get("end_time"))
            for item in items:
                # Bug fix: item.get("statement") may be None, and re.search(None)
                # raises a TypeError that would abort the entire export loop.
                if rc.search(item.get("statement") or ""):
                    continue
                # Upsert semantics: update an existing row for this query_id,
                # otherwise insert a new one.
                if ImpalaQueryInfo.get_response_by_query_id(session, item.get("query_id")):
                    ImpalaQueryInfo.query_update(session, item)
                else:
                    ImpalaQueryInfo.add(session, item)
    except Exception as e:
        logger.error(e)
    finally:
        session.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_impala_compute_stats_export_data_async(self):
    """Export Impala COMPUTE STATS query records for today (00:00-05:00).

    Runs compute_stats_handler over today's fixed window and stores each
    result via ImpalaQueryComputeStat.add. Errors are logged; the session
    is always closed.
    """
    session = SessionLocal()
    try:
        from datetime import datetime
        now = datetime.now()
        begin_time = now.strftime("%Y-%m-%d 00:00:00")
        # Original code added timedelta(days=0) here — a no-op; both bounds
        # are simply today's date with fixed times.
        end_time = now.strftime("%Y-%m-%d 05:00:00")
        items = compute_stats_handler(session, begin_time, end_time)
        for item in items:
            ImpalaQueryComputeStat.add(session, item)
    except Exception as e:
        logger.error(e)
    finally:
        session.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_yarn_export_data_async(self, past_day=1):
    """Export YARN job records for a past day into the database.

    Walks the 24-hour window starting ``past_day`` days ago at midnight in
    240-minute slices and stores every job via YarnJobInfo.add.

    :param past_day: how many days back the export window starts (default 1).
    """
    db = SessionLocal()
    try:
        from datetime import datetime, timedelta
        day_start = datetime.now() - timedelta(days=past_day)
        window_begin = day_start.strftime("%Y-%m-%d 00:00:00")
        window_end = (day_start + timedelta(days=1)).strftime("%Y-%m-%d 00:00:00")
        for slot in time_interval_ranges(window_begin, window_end, 240):
            for job in yarn_jobs_report_handler(slot.get("begin_time"), slot.get("end_time")):
                YarnJobInfo.add(db, job)
    except Exception as exc:
        logger.error(exc)
    finally:
        db.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_capacity_report_async(self, days=6):
    """Render and e-mail the big-data capacity report.

    Collects HDFS/Kudu disk capacity and Hive/Kudu database capacity over
    the last ``days`` days, renders ``bigdata/database_capacity.html`` and
    mails the result to a fixed recipient list.

    :param days: length of the reporting window in days (default 6).
    :return: True on success, False when source data is empty or on error.
    """
    session = SessionLocal()
    try:
        from datetime import datetime, timedelta
        now = datetime.now()
        start_date = (now - timedelta(days=days)).strftime("%Y-%m-%d")
        end_date = now.strftime("%Y-%m-%d")
        logger.info(f"bigdata capacity report handler => datetime_range: {start_date}-{end_date}")
        hdfs_disk_column, hdfs_disk_content = disk_hdfs_capacity_report_handler(session, start_date, end_date)
        kudu_disk_column, kudu_disk_content = disk_kudu_capacity_report_handler(session, start_date, end_date)
        # NOTE: these two handlers take (start, end, session) while the disk
        # handlers above take (session, start, end).
        hive_column, hive_content = hive_capacity_report_handler(start_date, end_date, session)
        kudu_column, kudu_content = kudu_capacity_report_handler(start_date, end_date, session)

        # Abort when hive_content or kudu_content came back empty.
        if not hive_content or not kudu_content:
            logger.error(
                f"bigdata_hive or bigdata_kudu: 数据为空值 => hive_data_size: {len(hive_content)} kudu_data_size:{len(kudu_content)}")
            return False
        env = Environment(loader=FileSystemLoader(config.TEMPLATE_ROOT))
        template = env.get_template('bigdata/database_capacity.html')  # load the report template
        data = template.render(
            {'hive_databases_column': hive_column, 'hive_databases_content': hive_content, 'kudu_databases_column': kudu_column,
             'kudu_databases_content': kudu_content, 'kudu_disk_column': kudu_disk_column, 'kudu_disk_content': kudu_disk_content,
             'hdfs_disk_column': hdfs_disk_column, 'hdfs_disk_content': hdfs_disk_content,
             "ops_http_url": config.OPS_HTTP_URL, "start_date": start_date, "end_date": end_date}).replace("\n", "")
        Message.send_email(
            email_name=["space.wen@welab-inc.com", "yuchang.yang@welab-inc.com", "alan.ding@welab-inc.com", "boll.lai@welab-inc.com",
                        "jet.wen@welab-inc.com", "bi@welab-inc.com"],
            subject=f"青鸾大数据容量报告{start_date}~{end_date}", html=data)
        logger.info(f"bigdata_capacity_report_async: 任务启动成功")
        return True
    except Exception as e:
        logger.error(f"bigdata_capacity_report_async: 任务启动失败 => {e}")
        return False
    finally:
        session.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_impala_report_async(self, days=1):
    """Render and e-mail the Impala SQL usage report.

    Collects the top-10 queries by duration, aggregate memory, estimated
    memory and thread CPU time over [``days`` days ago 00:00, today 08:00],
    renders ``bigdata/impala_query.html`` and mails the result.

    :param days: how many days back the reporting window starts (default 1).
    :return: True on success, False when any section is empty or on error.
    """
    session = SessionLocal()
    try:
        from datetime import datetime, timedelta
        current = datetime.now()
        start_datatime = (current - timedelta(days=days)).strftime('%Y-%m-%d 00:00:00')
        end_datatime = current.strftime('%Y-%m-%d 08:00:00')

        # Dispatch table: section name -> top-10 query handler.
        top_handlers = {
            "duration": get_impala_queries_duration_millis_top,
            "aggregate": get_impala_queries_memory_aggregate,
            "estimated": get_impala_queries_memory_estimated,
            "thread": get_impala_queries_thread_cpu_time,
        }
        sections = {key: handler(session, start_datatime, end_datatime, top=10)
                    for key, handler in top_handlers.items()}

        # Abort when any of the four sections has no data.
        if not all(content for _, content in sections.values()):
            logger.error(f"duration_content or aggregate_content or estimated_content or thread_content : 数据为空值 => "
                         f"duration_data_size: {len(sections['duration'][1])} aggregate_data_size:{len(sections['aggregate'][1])} "
                         f"estimated_data_size:{len(sections['estimated'][1])} thread_content_size:{len(sections['thread'][1])}")
            return False
        env = Environment(loader=FileSystemLoader(config.TEMPLATE_ROOT))
        template = env.get_template('bigdata/impala_query.html')  # load the report template
        context = {}
        for key, (column, content) in sections.items():
            context[f"{key}_column"] = column
            context[f"{key}_content"] = content
        data = template.render(context).replace("\n", "")
        Message.send_email(
            email_name=["space.wen@welab-inc.com", "yuchang.yang@welab-inc.com", "alan.ding@welab-inc.com", "boll.lai@welab-inc.com",
                        "jet.wen@welab-inc.com", "bi@welab-inc.com"],
            subject=f"青鸾impala SQL使用报告{start_datatime}~{end_datatime}",
            html=data)
        logger.info(f"bigdata_impala_report_async: 任务启动成功")
        return True
    except Exception as e:
        logger.error(f"bigdata_impala_report_async: 任务启动失败 => {e}")
        return False
    finally:
        session.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_yarn_report_async(self, days=0):
    """Render and e-mail the YARN job usage report for a single day.

    Collects the top-10 jobs by duration and CPU usage for the date ``days``
    days ago, renders ``bigdata/yarn_job.html`` and mails the result.

    :param days: how many days back the reported date lies (0 = today).
    :return: True on success, False when either data set is empty or on error.
    """
    session = SessionLocal()
    try:
        from datetime import datetime, timedelta
        now = datetime.now()
        date = (now - timedelta(days=days)).strftime("%Y-%m-%d")
        duration_column, duration_content = get_yarn_job_duration_top(date, session, top=10)
        cpu_column, cpu_content = get_yarn_job_cpu_top(date, session, top=10)

        # Abort when either duration or CPU data came back empty.
        if not duration_content or not cpu_content:
            logger.error(
                f"duration_content or cpu_column : 数据为空值 => duration_data_size: {len(duration_content)} cpu_data_size:{len(cpu_content)}")
            return False
        env = Environment(loader=FileSystemLoader(config.TEMPLATE_ROOT))
        template = env.get_template('bigdata/yarn_job.html')  # load the report template
        data = template.render(
            {'duration_column': duration_column, 'duration_content': duration_content, 'cpu_column': cpu_column,
             'cpu_content': cpu_content}).replace("\n", "")
        # Bug fix: the subject previously hard-coded (now - timedelta(days=1))
        # regardless of `days`, so the mailed date could disagree with the data
        # actually reported; use the same `date` the queries ran against.
        Message.send_email(
            email_name=["space.wen@welab-inc.com", "yuchang.yang@welab-inc.com", "alan.ding@welab-inc.com", "boll.lai@welab-inc.com",
                        "jet.wen@welab-inc.com", "bi@welab-inc.com"],
            subject=f"青鸾yarn Job使用报告{date}", html=data)
        logger.info(f"bigdata_yarn_report_async: 任务启动成功")
        return True
    except Exception as e:
        logger.error(f"bigdata_yarn_report_async: 任务启动失败 => {e}")
        return False
    finally:
        session.close()


@celery_app.task(base=TaskMonitor, bind=True)
def bigdata_cloudera_monitor_async(self):
    """Check CDH service health via Cloudera Manager and alert on problems.

    For every service item returned by check_service_status, pushes a
    markdown alert to a WeChat work bot and sends an SMS notification,
    logging whether the SMS was delivered.
    """
    cloudera_monitor = ClouderaMonitorHandler()
    items = check_service_status(cloudera_monitor)
    now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # Hoisted out of the loop: one NoticeTools instance serves all items.
    notice_tools = NoticeTools()

    for item in items:
        # The two branches originally duplicated the whole message, differing
        # only in the summary markup and one label ("监控检查" vs "监控项目").
        if item['summary'] == "BAD":
            summary = f"`{item['summary']}`"
            check_label = "监控检查"
        else:
            summary = f"<font color=\"warning\">{item['summary']}</font>"
            check_label = "监控项目"
        content = "[`告警`] 大数据集群CDH组件告警\n>" \
                  f"<font color=\"comment\">告警服务</font>:    <font color=\"comment\">{item['service_name']}</font>\n" \
                  f"<font color=\"comment\">告警时间</font>:    <font color=\"comment\">{now}</font>\n " \
                  f"<font color=\"comment\">{check_label}</font>:    <font color=\"comment\">{item['health_check']}</font>\n " \
                  f"<font color=\"comment\">服务状态</font>:    <font color=\"comment\">{summary}</font>\n "

        # NOTE(review): webhook key and SMS mobile number are hard-coded;
        # consider moving them into `config`.
        NoticeTools.send_wechatbot(key="cde61185-2ed5-436e-b908-94187d64947f", content=content)

        text = "[告警] 大数据集群CDH组件告警\n" \
               f"告警服务:{item['service_name']}\n" \
               f"告警时间:{now}\n" \
               f"监控项目:{item['health_check']}\n " \
               f"服务状态:{item['summary']}\n "

        result = notice_tools.send_sms(mobiles=["18682317773"], content=text)
        if result:
            logger.info(f"bigdata_cloudera_monitor_async  大数据集群CDH组件告警: {item['health_check']}告警短信发送成功")
        else:
            logger.error(f"bigdata_cloudera_monitor_async 大数据集群CDH组件告警: {item['health_check']}告警短信发送失败")
        # notice_tools.send_voice([{"username": "alan.ding", "mobile": "18682317773"}])

if __name__ == "__main__":
    # Manual entry point for running a single task locally during development.
    # Commented-out ad-hoc invocations of the other tasks were removed as
    # dead code; call any task directly here when debugging.
    bigdata_capacity_report_async()