import click
import os
import platform
import sys
from collections import OrderedDict

import json
import pandas as pd
from common.database import SessionLocal
from datetime import datetime, time

from app.fault_self_recovery.handlers.fsr_host_disk_handler import FSRHostDiskHandler
from app.fault_self_recovery.models.fsr_host_disk import FSRHostDisk
from app.fault_self_recovery.handlers.fsr_rabbitmq_queue_handler import FSRRabbitmqQueueHpaHandler
from app.fault_self_recovery.models.fsr_rabbitmq_queue import FSRRabbitmqQueue
from app.fault_self_recovery.handlers.fsr_container_hpa_handler import FSRContainerHpaHandler
from app.fault_self_recovery.models.fsr_container_hpa import FSRContainerHPA

from app.monitor_metrics.models.prom_host_info import PromHostInfo

from core.logger import logger
from common.welab_k8s import init_k8s
from common.welab_k8s.k8s_node import K8sNode


class FSRClick(object):
    """Sync fault-self-recovery (FSR) configuration into the database.

    Provides upsert helpers for three record families — host disks,
    RabbitMQ queues, and container HPA — sourced either from Excel
    sheets ("Sheet1") or from the Kubernetes node API.
    """

    def __init__(self):
        # One DB session per instance; released in __del__.
        self.session = SessionLocal()

    def __del__(self):
        # Best-effort cleanup: __del__ may run during interpreter
        # shutdown, so never let an exception escape from here.
        try:
            self.session.close()
        except Exception:
            pass

    def update_host_disk(self, host_disks_info):
        """Upsert host-disk records.

        Args:
            host_disks_info: iterable of dicts, each containing at least
                the "ip" and "mountpoint" keys (plus handler-specific
                fields such as "name", "classes", "script").

        Returns:
            bool: True if every record was added/updated; False on the
            first handler failure (remaining records are not processed).
        """
        fsr_host_disk_handler = FSRHostDiskHandler()
        for data in host_disks_info:
            host_disk_object = FSRHostDisk.get_object_by_ip_mountpoint(
                self.session, data["ip"], data["mountpoint"])
            if host_disk_object:
                # Record exists: update in place, preserving its uuid.
                data["uuid"] = host_disk_object.uuid
                result = fsr_host_disk_handler.update_host_disk(self.session, data)
                if not result:
                    logger.error("update host disk to database failed")
                    return False
                logger.info(f"{data['ip']}:{data['mountpoint']} was exist, update success")
                continue

            result = fsr_host_disk_handler.add_host_disk(self.session, data)
            if not result:
                logger.error(f"add {data['ip']}:{data['mountpoint']} to database failed")
                return False
            logger.info(f"{data['ip']}:{data['mountpoint']} add success")

        return True

    def update_host_disk_from_excel(self, file_path: str):
        """Load host-disk rows from "Sheet1" of *file_path* and upsert them.

        Returns:
            bool: True on success, False on any read or database failure
            (failures are logged, never raised).
        """
        try:
            df = pd.read_excel(file_path, "Sheet1")
            host_disks_info = [df.loc[i].to_dict() for i in df.index.values]

            result = self.update_host_disk(host_disks_info)
            if not result:
                logger.error("update host disk to database failed")
                return False
        except Exception as e:
            logger.exception(e)
            return False

        logger.info("update host disk from excel success")
        return True

    def update_host_disk_from_k8s(self):
        """Sync business ("webapp" zone) k8s nodes into the host-disk table.

        Only Ready nodes with a known InternalIP that also appear in the
        monitoring host inventory are recorded; each entry defaults to
        cleaning all mountpoints via the "cleandisk" script.

        Returns:
            bool: True on success, False on any failure.
        """
        init_k8s()
        k8s_nodes = K8sNode().list_node()
        if not k8s_nodes:
            logger.error("get k8s node list error")
            return False

        # Map ip -> display name from the monitoring host inventory.
        host_names = {
            host_info["ip"]: host_info["name"]
            for host_info in PromHostInfo.get_all_host_info(self.session)
        }

        host_disks_info = []
        for node in k8s_nodes:  # renamed: previously shadowed the K8sNode() client
            # labels may be None for label-less nodes; guard before lookup.
            if (node.metadata.labels or {}).get("zone") != "webapp":
                # Not a business node; do not record it.
                continue

            # Keep only nodes whose Ready condition is True.
            if not any(cond.type == "Ready" and cond.status == "True"
                       for cond in node.status.conditions):
                continue

            # Find the node's InternalIP; skip nodes without one.
            ip = next((addr.address for addr in node.status.addresses
                       if addr.type == "InternalIP"), None)
            if ip is None:
                continue

            if ip not in host_names:
                # BUGFIX: a KeyError here previously aborted the whole
                # sync; an unknown host now just skips this node.
                logger.warning(f"{ip} not found in host inventory, skip")
                continue

            host_disks_info.append({
                "classes": "业务容器",
                "script": "cleandisk",
                "mountpoint": "all",
                "ip": ip,
                "name": host_names[ip],
            })

        result = self.update_host_disk(host_disks_info)
        if not result:
            logger.error("update host disk to database failed")
            return False

        # BUGFIX: this message previously said "from excel".
        logger.info("update host disk from k8s success")
        return True

    def update_rabbitmq_queue_from_excel(self, file_path: str):
        """Load RabbitMQ-queue rows from "Sheet1" and upsert them.

        Rows are keyed by (cluster, vhost, name); the "consumer_ip"
        column is dropped, and "ECS服务器" rows get zeroed
        number/max/state counters.

        Returns:
            bool: True on success, False on any read or database failure.
        """
        try:
            rabbitmq_queue_info = []
            df = pd.read_excel(file_path, "Sheet1")
            for i in df.index.values:
                row_data = df.loc[i].to_dict()
                # pop instead of del: tolerate sheets without the column.
                row_data.pop("consumer_ip", None)
                if row_data["classes"] == "ECS服务器":
                    row_data["number"] = 0
                    row_data["max"] = 0
                    row_data["state"] = 0
                rabbitmq_queue_info.append(row_data)

            fsr_rabbitmq_queue_handler = FSRRabbitmqQueueHpaHandler()
            for data in rabbitmq_queue_info:
                rabbitmq_queue_object = FSRRabbitmqQueue.get_object_by_cluster_vhost_name(
                    self.session, data["cluster"], data["vhost"], data["name"])
                if rabbitmq_queue_object:
                    # Record exists: update in place, preserving its uuid.
                    data["uuid"] = rabbitmq_queue_object.uuid
                    result = fsr_rabbitmq_queue_handler.update_rabbitmq_queue(self.session, data)
                    if not result:
                        logger.error("update rabbitmq queue to database failed")
                        return False
                    logger.info(f"{data['vhost']}:{data['name']} was exist, update success")
                    continue

                result = fsr_rabbitmq_queue_handler.add_rabbitmq_queue(self.session, data)
                if not result:
                    logger.error(f"add {data['vhost']}:{data['name']} to database failed")
                    return False
                logger.info(f"{data['vhost']}:{data['name']} add success")
        except Exception as e:
            logger.exception(e)
            return False

        logger.info("update rabbitmq queue from excel success")
        return True

    def update_container_hpa_from_excel(self, file_path: str):
        """Load container-HPA rows from "Sheet1" and upsert them by name.

        Returns:
            bool: True on success, False on any read or database failure.
        """
        try:
            df = pd.read_excel(file_path, "Sheet1")
            container_hpa_info = [df.loc[i].to_dict() for i in df.index.values]

            fsr_container_hpa_handler = FSRContainerHpaHandler()
            for data in container_hpa_info:
                container_hpa_object = FSRContainerHPA.get_object_by_name(self.session, data["name"])
                if container_hpa_object:
                    # Record exists: update in place, preserving its uuid.
                    data["uuid"] = container_hpa_object.uuid
                    result = fsr_container_hpa_handler.update_container_hpa(self.session, data)
                    if not result:
                        logger.error("update container hpa to database failed")
                        return False
                    logger.info(f"server:{data['name']} was exist, update success")
                    continue

                result = fsr_container_hpa_handler.add_container_hpa(self.session, data)
                if not result:
                    # BUGFIX: colon added for consistency with sibling messages.
                    logger.error(f"add server:{data['name']} to database failed")
                    return False
                logger.info(f"server:{data['name']} add success")
        except Exception as e:
            logger.exception(e)
            return False

        logger.info("update container hpa from excel success")
        return True




def main(file_path: str = "container_hpa.xlsx"):
    """Entry point: import container-HPA config from *file_path*.

    The default path preserves the original hard-coded behavior; pass a
    different path to import another sheet. The other sync helpers on
    ``FSRClick`` can be invoked the same way, e.g.::

        fsr_click.update_host_disk_from_excel("host_disk.xlsx")
        fsr_click.update_host_disk_from_k8s()
        fsr_click.update_rabbitmq_queue_from_excel("rabbitmq-prod.xlsx")
    """
    fsr_click = FSRClick()
    fsr_click.update_container_hpa_from_excel(file_path)


if __name__ == "__main__":
    main()
