import concurrent
import datetime
import json
import os
import queue
import threading
import time
from threading import Thread
from typing import List, Any
import psutil as psutil
import taos
from opcua.client.client import Client
from opcua.common.node import Node
from opcua.ua import UaStatusCodeError
from backend.config import opcua_adapter_params
from backend.connectors._redis import get_redis_connection
from backend.connectors.emqx import get_emqx_connection
from backend.connectors.mysql import get_mysql_connection
from backend.connectors.tdengine import get_tdengine_connection
from backend.get_logger import logger
from sqlalchemy import text

# Module-level MQTT client shared by all emqx consumer threads in this process.
emqx_connection = get_emqx_connection.client


class SQLWriter:
    """Batch writer for TDengine: accumulates rows per table and flushes them
    as multi-table INSERT statements into the ``archive`` database.
    """

    def __init__(self, get_connection_func):
        """
        :param get_connection_func: zero-argument factory returning a
            SQLAlchemy-style connection (execute/commit/rollback/close).
        """
        self._tb_values = {}  # table name -> accumulated "(ts,val) " value groups
        self._tb_tags = {}    # reserved for per-table tags (currently unused)
        self._conn = get_connection_func()
        self._max_sql_length = self.get_max_sql_length()
        # keep 36500 days (~100 years) of history; then select the database
        self._conn.execute(text("create database if not exists archive keep 36500"))
        self._conn.execute(text("USE archive"))

    def get_max_sql_length(self):
        """Maximum length of a single INSERT statement, in bytes (1 MiB)."""
        return 1024 * 1024

    def process_lines(self, lines: List[List[str]]):
        """Accumulate rows and flush them all in one batch.

        :param lines: sequence of ``[tbName, ts, val, ...]`` rows; everything
            after the table name becomes one parenthesized value group.
        """
        for line in lines:
            table_name = line[0]
            value = '(' + ",".join(line[1:]) + ') '
            if table_name in self._tb_values:
                self._tb_values[table_name] += value
            else:
                self._tb_values[table_name] = value
        self.flush()

    def flush(self):
        """
        Assemble INSERT statement(s) and execute them.
        When the accumulated SQL length approaches MAX_SQL_LENGTH, the pending
        statements are executed immediately and a new batch is started.
        Fix vs. original: never executes an empty statement — the original
        called execute_sql("") when nothing was buffered, or when a flush left
        the buffer empty before appending the oversized statement.
        """
        if not self._tb_values:
            return  # nothing buffered
        sql_len = 0
        buf = []
        for tb_name, values in self._tb_values.items():
            q = "INSERT INTO " + tb_name + " VALUES " + values
            # Flush the current batch before it would exceed the limit.
            if buf and sql_len + len(q) >= self._max_sql_length:
                self.execute_sql(" ".join(buf))
                sql_len = 0
                buf = []
            buf.append(q)
            sql_len += len(q)
        if buf:
            self.execute_sql(" ".join(buf))
        self._tb_values.clear()

    def execute_sql(self, sql):
        """Execute one INSERT statement and commit.

        On TDengine error 9731 ("Table does not exist") the batch is rolled
        back and only logged — tables must exist up-front, since the row data
        carries no schema to create them from. Any other error is rolled back,
        logged with the offending SQL, and re-raised.
        """
        try:
            self._conn.execute(text(sql))
            self._conn.commit()
        except taos.Error as e:
            self._conn.rollback()
            error_code = e.errno & 0xffff
            # Table does not exist
            if error_code == 9731:
                logger.error("表不存在，先创建数据表!")
            else:
                logger.error("Execute SQL: %s", sql)
                raise e
        except BaseException as baseException:
            self._conn.rollback()
            logger.error("Execute SQL: %s", sql)
            raise baseException

    def close(self):
        """Release the underlying connection, if one was created."""
        if self._conn:
            self._conn.close()


class OPCUAAdapter(object):
    """
    OPCUA数据采集类
    """

    def __init__(self, opcua_info, use_subscribe=True):
        self.opcua_info = opcua_info
        # self.process_running = process_running
        try:
            self.url: str = self.opcua_info['url']
            self.tag_id_to_node_id_map = {opcua['node_id']: opcua['tag_uuid'] for opcua in
                                          opcua_info['opcua_points']}  # nodeid:tagid
            self.tag_id_to_detail_map = {opcua['tag_uuid']: opcua for opcua in opcua_info['opcua_points']}
        except KeyError:
            logger.error(f'opcua info is not correct!')
        self._ua = Client(url=self.url, timeout=5)  # opcua 客户端
        self.connected: bool = False  # opcua 连接成功状态位
        self.thread_list: List[Thread] = []  # 活动的线程表
        self.raw_dict = {}  # 获取正常的数据
        self.alarm_consumer_queue = queue.Queue(maxsize=opcua_adapter_params['alarm_consumer_queue_length'])  # 报警数据消费队列
        self.archive_consumer_queue = queue.Queue(
            maxsize=opcua_adapter_params['archive_consumer_queue_length'])  # 归档数据消费队列
        self.emqx_consumer_queue = queue.Queue(maxsize=opcua_adapter_params['emqx_consumer_queue_length'])  # emqx数据消费队列
        # 读取过来的实时数据映射到redis中

    def connect(self):
        """
        连接函数
        :return:
        """
        try:
            # 判断连接标志位
            if self.connected:
                return
            else:
                # 连接不成功，启动连接
                self._ua.connect()
                # 设置连接标志位
                self.connected = True
                # 打印日志
                logger.info(f"初次连接{self.url}成功！")
                return
        # 判断连接错误的情况
        except concurrent.futures._base.CancelledError:
            self.disconnect()
            self.connected = False
            logger.error('链接失败：CancelledError')
            # 调用重连方法
            self.reconnect()
        except Exception as e:
            self.disconnect()
            self.connected = False
            logger.error("初次连接失败,失败原因：", e)
            # 调用重连方法
            self.reconnect()

    def disconnect(self):
        """
        断开连接
        :return:
        """
        try:
            if self._ua and self.connected:
                self._ua.disconnect()
                self.connected = False
                logger.info("主动断开连接成功")
        except Exception as e:
            logger.error("主动断开连接失败，失败原因：", str(e))

    def reconnect(self):
        """
        重连
        :return:
        """
        index = 0
        # 无限重连
        redis_conn = get_redis_connection()
        # while self.process_running.value:
        while True:
            try:
                flag = redis_conn.get(f'task_{self.opcua_info["id"]}_flag')
                flag = bool(int(flag))
            except Exception:

                redis_conn.close()
                redis_conn = get_redis_connection()
                continue
            if not flag:
                # 关闭opcua
                try:
                    self._ua.disconnect()
                    # 设置标志位
                    self.connected = False
                    # 还给连接池
                    redis_conn.close()
                except Exception:
                    pass
                finally:
                    return
            try:
                self._ua.connect()
                self.connected = True
                index = 0
                logger.info(f"重连{self.url}成功!")
                return
            except AttributeError as e:
                index += 1
                logger.error(f"第{index}次重连失败,失败原因：{str(e)}!")
                self.connected = False
                time.sleep(index * 1)
                continue
            except ConnectionRefusedError as e:
                index += 1
                logger.error(f"第{index}次重连失败,失败原因：{str(e)}!")
                self.connected = False
                time.sleep(index * 1)
                continue
            except OSError as e:
                index += 1
                logger.error(f"与OPCUA服务器未能建立连接,失败原因:{str(e)}!")
                self.connected = False
                time.sleep(index * 1)
                continue
            except Exception as e:
                index += 1
                logger.error(f"第{index}次重连失败,失败原因：{str(e)}!")
                self.connected = False
                time.sleep(index * 1)
                continue

    def interval_read(self, interval: int) -> None:
        """
        按照采集频率定时去采集
        :param interval:
        :return:
        """
        redis_conn = get_redis_connection()
        thread_name = threading.current_thread().name
        nodes = []
        # while self.process_running.value:

        while True:
            try:
                flag = redis_conn.get(f'task_{self.opcua_info["id"]}_flag')
                flag = bool(int(flag))
            except Exception:
                redis_conn.close()
                redis_conn = get_redis_connection()
                continue
            if not flag:
                try:
                    self._ua.disconnect()
                    self.connected = False
                    redis_conn.close()
                except Exception:
                    pass
                finally:
                    return
            # 每分钟采集多少次，采集超时多少次，采集node数量，多少个node是None
            start_time = time.time()
            if not self.connected:
                # 如果没有连接成功，开启重连
                self.reconnect()
            else:
                try:
                    nodes_str_list = self.tag_id_to_node_id_map.keys()
                    nodes = [self._ua.get_node(node) for node in nodes_str_list]
                    values = self._ua.get_values(nodes)
                    self.raw_dict = dict(zip(nodes_str_list, values))
                    redis_conn.hmset('opcua_addr', self.raw_dict)
                except AttributeError as e:
                    logger.error(f"属性读取错误:{str(e)}!")
                except TimeoutError:
                    logger.error(f"接收服务端报文超时")
                except ConnectionRefusedError as e:
                    self.disconnect()
                    self.reconnect()
                    logger.error(f"数据获取失败,失败原因:{str(e)}!")
                except ConnectionAbortedError as e:
                    self.disconnect()
                    self.reconnect()
                    logger.error(f"数据获取失败,失败原因:{str(e)}!")
                except UaStatusCodeError as e:
                    self.disconnect()
                    self.reconnect()
                    logger.error(f"数据获取失败,失败原因:{str(e)}!")
                except OSError as e:
                    self.disconnect()
                    self.reconnect()
                    logger.error(f"数据获取失败,失败原因:{str(e)}!")
                except RuntimeError as e:
                    self.disconnect()
                    self.reconnect()
                    logger.error(f'运行错误,失败原因:{str(e)}')
                except Exception as e:
                    self.disconnect()
                    self.reconnect()
                    logger.error(f"未捕获到的异常：{str(e)}")
                finally:
                    end_time = time.time()
                    try:
                        # 将性能数据保存到redis中
                        redis_conn.hmset(f'performance_{os.getpid()}',
                                         mapping={'nodes': len(nodes),
                                                  f'{thread_name}_use_time': f'{(end_time - start_time):.2f}'})
                        # 将读取过来的实时数据映射到hash数据表中
                        redis_conn.hmset('opcua_address_space', self.raw_dict)
                        redis_conn.expire(f'performance_{os.getpid()}', 60)
                    except Exception:
                        if redis_conn:
                            redis_conn.close()
                            redis_conn = get_redis_connection()
                    time.sleep(interval)

    def node_write(self, nodes: List[Node], values: List[Any]):
        """
        写入node
        :param nodes:
        :param values:
        :return:
        """
        try:
            self._ua.set_values(nodes, values)
        except Exception as e:
            logger.error(f"数据写入失败,失败原因：{str(e)}")

    def monitor_thread(self):
        """
        监视线程
        :return:
        """
        redis_conn = get_redis_connection()
        # while self.process_running.value:
        while True:
            try:
                try:
                    flag = redis_conn.get(f'task_{self.opcua_info["id"]}_flag')
                    flag = bool(int(flag))
                except Exception:
                    redis_conn.close()
                    redis_conn = get_redis_connection()
                    continue
                if not flag:
                    try:
                        self._ua.disconnect()
                        self.connected = False
                        redis_conn.close()
                    except Exception:
                        pass
                    finally:
                        return
                current_process_id = os.getpid()
                process = psutil.Process(current_process_id)
                # 获取进程的基本信息
                process_name = process.name()
                cpu_usage = process.cpu_percent(interval=1)  # 进程的 CPU 使用率，间隔 1 秒
                memory_info = process.memory_info()  # 进程的内存使用情况
                io_counters = process.io_counters()  # 进程的 IO 计数
                disk_usage = psutil.disk_usage('/')  # 获取根目录的磁盘使用情况
                thread_list = []

                for thread in threading.enumerate():
                    thread_list.append((thread.ident, thread.name, thread.is_alive()))
                    if thread.name == 'continuous_thread' and thread.is_alive() == False:
                        logger.error(f'读取线程出错，请尽快联系管理员处理！')
                redis_conn.hmset(name=f'performance_{current_process_id}',
                                 mapping={'process_name': process_name, 'cpu_usage': cpu_usage,
                                          'memory_info_RSS': f'{memory_info.rss / (1024 * 1024):.2f} MB',
                                          'memory_info_VMS': f'{memory_info.vms / (1024 * 1024): .2f} MB',
                                          'io_read': f"{io_counters.read_bytes / (1024 * 1024):.2f} MB",
                                          'io_write': f"{io_counters.write_bytes / (1024 * 1024):.2f} MB",
                                          'disk_usage': f'{disk_usage.percent}%',
                                          'threads': json.dumps(thread_list),
                                          'pid': current_process_id,
                                          'archive_consumer_length': self.archive_consumer_queue.qsize(),
                                          'alarm_consumer_length': self.alarm_consumer_queue.qsize(),
                                          'emqx_consumer_length': self.emqx_consumer_queue.qsize(),
                                          })
                redis_conn.expire(f'performance_{current_process_id}', 60)
            except Exception as e:
                logger.error(f'监控子线程出错:{str(e)}')
                if redis_conn:
                    redis_conn.close()
                    redis_conn = get_redis_connection()
            finally:
                time.sleep(int(opcua_adapter_params['monitor_frequency']))

    def change_data_notifier(self, timestamp, node_id, new_data, old_data):
        """
        :param timestamp:
        :param node:
        :param new_data:
        :param old_data:
        :return:
        """
        try:
            tag_id = self.tag_id_to_node_id_map[node_id]
        except KeyError:
            pass
        else:

            content = {
                'timestamp': timestamp,
                'tag_id': tag_id,
                'new_data': new_data,
                'old_data': old_data
            }

            self.alarm_consumer_queue.put(content)
            self.emqx_consumer_queue.put(content)
            self.archive_consumer_queue.put(content)

    def consumer_alarm_info(self):
        """
        处理报警信息
        :return:
        """
        redis_conn = get_redis_connection()
        alarm_table_name = opcua_adapter_params['alarm_table_name']
        mysql_conn = get_mysql_connection()
        thread_name = threading.current_thread().name
        alarm_count = 0
        # while self.process_running.value:
        while True:
            try:
                flag = redis_conn.get(f'task_{self.opcua_info["id"]}_flag')
                flag = bool(int(flag))
            except Exception:
                redis_conn.close()
                redis_conn = get_redis_connection()
                continue
            if not flag:
                try:
                    self._ua.disconnect()
                    self.connected = False
                    mysql_conn.close()
                    redis_conn.close()
                except Exception:
                    pass
                finally:
                    return
            try:
                start_time = time.time()
                content = self.alarm_consumer_queue.get()
                tag_detail = self.tag_id_to_detail_map.get(content['tag_id'])
                active_alarm = tag_detail.get('active_alarm')
                if active_alarm:
                    up_limit = tag_detail.get('alarm_up')
                    down_limit = tag_detail.get('alarm_down')
                    if float(content['new_data']) > float(up_limit):
                        sql = f"""insert into {alarm_table_name} (device_name, tag_uuid, tag_name,alarm_message,alarm_limit,value) values ("{self.opcua_info['device_name']}","{content["tag_id"]}", "{tag_detail.get("comments")}", "{tag_detail.get("alarm_up_info")}", "{tag_detail.get("alarm_up")}", "{content["new_data"]}")"""
                        try:
                            mysql_conn.execute(text(sql))
                            mysql_conn.commit()
                            alarm_count += 1
                        except Exception as e:
                            logger.error(f'数据插入错误,错误原因:{str(e)}!')
                            mysql_conn.rollback()
                    elif float(content['new_data']) < float(down_limit):
                        sql = f"""insert into {alarm_table_name} (device_name, tag_uuid, tag_name,alarm_message,alarm_limit,value) values ("{self.opcua_info['device_name']}","{content["tag_id"]}", "{tag_detail.get("comments")}", "{tag_detail.get("alarm_down_info")}", "{tag_detail.get("alarm_down")}", "{content["new_data"]}")"""
                        try:
                            mysql_conn.execute(text(sql))
                            mysql_conn.commit()
                            alarm_count += 1
                        except Exception as e:
                            logger.error(f'数据插入错误,错误原因:{str(e)}!')
                            mysql_conn.rollback()
            except Exception as e:
                logger.error(str(e))
                if mysql_conn:
                    mysql_conn.close()
                    mysql_conn = get_mysql_connection()
            finally:
                end_time = time.time()
                try:
                    redis_conn.hmset(f'performance_{os.getpid()}',
                                     mapping={f'{thread_name}_use_time': f'{(end_time - start_time):.2f}',
                                              'alarm_count': alarm_count})
                except Exception:
                    if redis_conn:
                        redis_conn.close()
                        redis_conn = get_redis_connection()
                time.sleep(2)

    def get_archive_buffer_size(self, buffer: dict[str, List]):
        buffer_size = 0
        for _, v in buffer.items():
            buffer_size += len(v)
        return buffer_size

    def consumer_archive_info(self):
        thread_name = threading.current_thread().name
        redis_conn = get_redis_connection()
        buffer = {}
        td_conn = get_tdengine_connection()
        while True:
            try:
                flag = redis_conn.get(f'task_{self.opcua_info["id"]}_flag')
                flag = bool(int(flag))
            except Exception:
                redis_conn.close()
                redis_conn = get_redis_connection()
                continue
            if not flag:
                try:
                    self._ua.disconnect()
                    self.connected = False
                    redis_conn.close()
                except Exception:
                    pass
                finally:
                    return
            start_time = time.time()
            try:
                content = self.archive_consumer_queue.get()
                tag_detail = self.tag_id_to_detail_map.get(content['tag_id'])  # 获取到设备的详细信息
                if tag_detail.get('active_archive'):
                    # 如果归档激活
                    timestamp = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')
                    try:
                        # 获取值
                        data = round(float(content['new_data']), 2)
                    except Exception:
                        data = 0.0
                    if self.get_archive_buffer_size(buffer)< 100:
                        buffer.setdefault(content['tag_id'], []).append([timestamp, data])
                    else:
                        total_sql = ""
                        for table_name, values in buffer.items():
                            sql = f'INSERT INTO {table_name} values '
                            for value in values:
                                sql += f'("{value[0]}",{value[1]}),'
                            sql = sql[:-1]
                            sql += ';'
                            total_sql += sql
                        td_conn.execute(text(total_sql))
                        td_conn.commit()
                        buffer.clear()
            except taos.Error as e:
                td_conn.rollback()
                logger.error(f'执行失败:{total_sql}:{e}')
            except BaseException as e:
                td_conn.rollback()
                logger.error(f'执行失败:{total_sql}:{e}')
                td_conn.close()
                td_conn = get_tdengine_connection()
            finally:
                end_time = time.time()
                try:
                    redis_conn.hmset(f'performance_{os.getpid()}', mapping={'buffer': len(buffer),
                                                                            f'{thread_name}_use_time': f'{(end_time - start_time):.2f}'})
                except Exception:
                    if redis_conn:
                        redis_conn.close()
                        redis_conn = get_redis_connection()
                time.sleep(0.5)

    def consumer_emqx_info(self):
        # while self.process_running.value:
        redis_conn = get_redis_connection()
        while True:
            try:
                flag = redis_conn.get(f'task_{self.opcua_info["id"]}_flag')
                flag = bool(int(flag))
            except Exception:
                redis_conn.close()
                redis_conn = get_redis_connection()
                continue
            if not flag:
                try:
                    self._ua.disconnect()
                    self.connected = False
                    redis_conn.close()
                except Exception:
                    pass
                finally:
                    return
            try:
                content = self.emqx_consumer_queue.get()
                tag_detail = self.tag_id_to_detail_map.get(content['tag_id'])
                mqtt_topic_str = tag_detail.get('mqtt_topic_name')
                topic_list = []
                for topic in mqtt_topic_str.split(';'):
                    topic_name, qos = topic.split(',')
                    topic_list.append({'topic_name': topic_name.split(':')[1].strip().replace('\n', ''),
                                       'qos': qos.split(':')[1].strip().replace('\n', '')})
                payload = json.dumps({content['tag_id']: content['new_data']})
                for topic in topic_list:
                    emqx_connection.publish(topic=topic['topic_name'], payload=payload, qos=int(topic['qos']))
            except Exception as e:
                logger.error(str(e))
                emqx_connection.loop_stop()
                emqx_connection.disconnect()

    def subscribe_data_change(self):

        copy_raw_dict = self.raw_dict.copy()
        flag = False
        # while self.process_running.value:
        redis_conn = get_redis_connection()
        while True:
            try:
                _flag = redis_conn.get(f'task_{self.opcua_info["id"]}_flag')
                _flag = bool(int(_flag))
            except Exception:
                redis_conn.close()
                redis_conn = get_redis_connection()
                continue
            if not _flag:
                try:
                    self._ua.disconnect()
                    self.connected = False
                    redis_conn.close()
                except Exception:
                    pass
                finally:
                    return
            d1_keys = self.raw_dict.keys()
            d2_keys = copy_raw_dict.keys()

            if _ := d1_keys - d2_keys:
                flag = True
                for k in list(_):
                    self.change_data_notifier(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), k,
                                              self.raw_dict[k],
                                              0)
            if _ := d2_keys - d1_keys:
                flag = True
                for k in list(_):
                    self.change_data_notifier(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), k, 0,
                                              self.raw_dict[k])

            commen_keys = d1_keys & d2_keys

            for key in commen_keys:
                if copy_raw_dict[key] != self.raw_dict[key]:
                    self.change_data_notifier(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), key,
                                              copy_raw_dict[key], self.raw_dict[key])
                    flag = True
            if flag:
                copy_raw_dict = self.raw_dict.copy()
                flag = False
            time.sleep(0.5)

    def run(self):
        """
        启动服务
        :return:
        """
        try:
            # 连接opcua服务器
            self.connect()
            # 数据采集线程
            interval_acqusition_task = Thread(target=self.interval_read, name='continuous_thread', args=(1,))
            # 监控线程
            monitor_thread_task = Thread(target=self.monitor_thread, name='monitor_thread')
            # 数据订阅线程
            subscribe_thread_task = Thread(target=self.subscribe_data_change, name='subscribe_thread')
            # 报警处理线程
            for i in range(opcua_adapter_params['alarm_worker']):
                consumer_alarm_info_task = Thread(target=self.consumer_alarm_info, name=f'consumer_alarm_info_{i + 1}')
                self.thread_list.append(consumer_alarm_info_task)
            # 归档处理线程
            for i in range(opcua_adapter_params['archive_worker']):
                consumer_archive_info_task = Thread(target=self.consumer_archive_info,
                                                    name=f'consumer_archive_info_{i + 1}')
                self.thread_list.append(consumer_archive_info_task)
            # emqx处理线程
            for i in range(opcua_adapter_params['emqx_worker']):
                consumer_emqx_info_task = Thread(target=self.consumer_emqx_info, name=f'consumer_emqx_info_{i + 1}')
                self.thread_list.append(consumer_emqx_info_task)
            # todolist 增加数据到redis的线程

            self.thread_list.append(interval_acqusition_task)
            self.thread_list.append(monitor_thread_task)
            self.thread_list.append(subscribe_thread_task)
            for th in self.thread_list:
                th.start()
            for th in self.thread_list:
                th.join()
            print('开始执行....')
        except Exception as e:
            logger.error(str(e))
        print('任务结束.....')
