import json
import queue
import time

import numpy as np

from config import pack_len_to_filter, pack_len_to_send, data_frequency
from filter import butter_lowpass_filter, fft_filter, do_filter
from utils.helper import Filter_helper

# All samples in the batch currently being sent
arr = []

# Accumulation buffer for incoming socket data (handles TCP sticky-packet
# coalescing: received chunks may not align with packet boundaries)
prefix_arr = []

# Raw samples currently retained as input for generating filtered output
use_filter_arr = []

# Number of accumulated samples not yet emitted as filtered packets
# (sticky-packet handling for the filtered stream)
prefix_filter_count = 0

# Outgoing websocket message queue; injected by data_process()
websocket_queue = None

# Total number of raw samples received since the session started
total_count_since_start = 0

# Running indices used to derive timestamps for raw / filtered samples
received_count = 0
received_filtered_count = 0

# Events and queues shared with sibling processes; all injected by data_process()
event_start_record = None
pre_records_queue = None
records_queue = None
q_edit_BPM = None

event_connect_start = None
share_dict = None
event_scan_start = None
event_open_cmd_send = None

# NOTE(review): filter_helper is instantiated but not used in this file —
# presumably consumed elsewhere; verify before removing.
filter_helper = Filter_helper()
filter_data_dict_queue = None


def data_process(q_recv_data, websocket_queue1, event_start_record1,
                 records_queue1, share_dict1,
                 q_edit_BPM1, event_connect_start1,
                 event_scan_start1, event_open_cmd_send1, filter_data_dict_queue1, pre_records_queue1
                 ):
    """Worker-process entry point.

    Publishes the shared queues/events into module globals, then loops
    forever: while the device is connected and the open command has been
    sent, pull raw sample batches from ``q_recv_data`` and process them;
    otherwise drain state via ``handle_disconnect``.

    Args:
        q_recv_data: queue of raw sample batches (lists) from the receiver.
        websocket_queue1..pre_records_queue1: shared queues/events/dict
            created by the parent process; stored into module globals.

    Never returns.
    """
    global websocket_queue
    global event_start_record
    global records_queue
    global q_edit_BPM
    global event_connect_start
    global share_dict
    global event_scan_start
    global event_open_cmd_send
    global filter_data_dict_queue
    global pre_records_queue
    event_start_record = event_start_record1
    websocket_queue = websocket_queue1
    records_queue = records_queue1
    q_edit_BPM = q_edit_BPM1
    event_connect_start = event_connect_start1
    share_dict = share_dict1
    event_scan_start = event_scan_start1
    event_open_cmd_send = event_open_cmd_send1
    filter_data_dict_queue = filter_data_dict_queue1
    pre_records_queue = pre_records_queue1

    while True:
        if event_connect_start.is_set() and event_open_cmd_send.is_set():
            # BUG FIX: the original polled qsize() in a tight loop, which
            # busy-spins a full CPU core while the queue is empty. A blocking
            # get with a short timeout yields the CPU but still re-checks the
            # connection events regularly.
            try:
                batch = q_recv_data.get(timeout=0.1)
            except queue.Empty:
                continue
            handle_data(batch)
        else:
            handle_disconnect(q_recv_data)
            # Avoid a hot spin while waiting for the device to (re)connect.
            time.sleep(0.05)


def handle_disconnect(q_recv_data):
    """Reset all per-session state when the device is not connected.

    Drains the raw-receive queue and the BPM-edit queue, then zeroes every
    counter and clears every accumulation buffer so the next session starts
    from a clean slate.
    """
    global received_count
    global received_filtered_count
    global total_count_since_start
    global prefix_arr
    global arr
    global use_filter_arr
    global prefix_filter_count
    # Discard anything still queued from the previous session.
    while q_recv_data.qsize() > 0:
        q_recv_data.get()
    while q_edit_BPM.qsize() > 0:
        q_edit_BPM.get()
    # Reset counters and sticky-packet buffers.
    received_count = received_filtered_count = 0
    total_count_since_start = 0
    prefix_filter_count = 0
    prefix_arr, arr, use_filter_arr = [], [], []


def handle_data(current_data_arr):
    """Feed one freshly received batch of raw samples into both pipelines:
    the filtered-output accumulator first, then the raw pass-through sender."""
    record_use_filter_data(current_data_arr)
    record_use_send_data(current_data_arr)


def record_use_send_data(current_data_arr):
    """Accumulate raw samples and emit fixed-size packets.

    Appends the new batch to the sticky-packet buffer and, for every full
    ``pack_len_to_send`` samples available, forwards one packet through
    map_received_data() as raw data.
    """
    global arr
    global prefix_arr
    prefix_arr = prefix_arr + current_data_arr
    # Emit as many complete packets as the buffer now holds.
    while len(prefix_arr) >= pack_len_to_send:
        arr = prefix_arr[:pack_len_to_send]
        map_received_data(raw_data=arr, filter_data=None)
        prefix_arr = prefix_arr[pack_len_to_send:]


def record_use_filter_data(current_data_arr):
    """Accumulate raw samples, run the band filter, and emit filtered packets.

    Keeps at most ``pack_len_to_filter`` recent samples as filter input.
    Once ``pack_len_to_send`` or more un-emitted samples have accumulated,
    filters the buffer and forwards the result in consecutive
    ``pack_len_to_send``-sized chunks through map_received_data().
    """
    # Samples retained as filter input.
    global use_filter_arr
    # Count of accumulated samples not yet emitted as filtered packets.
    global prefix_filter_count
    global total_count_since_start
    total_count_since_start = total_count_since_start + len(current_data_arr)
    prefix_filter_count = prefix_filter_count + len(current_data_arr)
    use_filter_arr = use_filter_arr + current_data_arr
    # Trim the filter input window to its maximum length.
    if len(use_filter_arr) >= pack_len_to_filter:
        use_filter_arr = use_filter_arr[-pack_len_to_filter:]
    # Enough pending samples for at least one packet: filter once, then slice.
    if prefix_filter_count >= pack_len_to_send:
        need_send_list_total = get_filter_sliced_data(prefix_filter_count)
        need_send_list_total = np.around(need_send_list_total)
        offset = 0
        while prefix_filter_count >= pack_len_to_send:
            # BUG FIX: the original always re-sliced [:pack_len_to_send],
            # re-sending the FIRST chunk whenever more than one packet had
            # accumulated. Advance through the filtered buffer instead so
            # consecutive chunks are emitted.
            need_send_list = need_send_list_total[offset:offset + pack_len_to_send]
            offset = offset + pack_len_to_send
            prefix_filter_count = prefix_filter_count - pack_len_to_send
            map_received_data(raw_data=None, filter_data=need_send_list)


def get_filter_sliced_data(need_send_filter_len):
    """Filter the accumulated buffer and return its trailing samples.

    Runs do_filter() over ``use_filter_arr`` with the low/high cutoffs taken
    from the shared dict, then returns the last ``need_send_filter_len``
    filtered values as a plain list.
    """
    cutoffs = {
        'low': share_dict['filter_low'],
        'hi': share_dict['filter_hi']
    }
    filtered = do_filter(use_filter_arr, cutoffs)
    return filtered.tolist()[-need_send_filter_len:]


def send_ws_msg(msg):
    """Hand a serialized message to the websocket sender via its queue."""
    websocket_queue.put(msg)


def map_received_data(raw_data, filter_data):
    """Wrap samples with timestamp/validity metadata and publish them.

    Exactly one of ``raw_data`` / ``filter_data`` is expected to be non-None.
    Raw samples are additionally recorded for the BPM-edit and XVP-record
    pipelines; filtered samples are additionally fed to the P/T (BPM)
    calculator. Either way the mapped batch is sent over the websocket as a
    "Breathe" message.
    """
    global received_count
    global received_filtered_count
    global event_scan_start
    batch = []
    if raw_data is not None:
        for sample in raw_data:
            ws_item = {
                'timestamp': int(received_count * data_frequency * 1000),
                'isValid': event_scan_start.is_set(),
                "data_origin": sample
            }
            # NOTE(review): the recorded copy stores the RAW sample under the
            # "data_filter" key — presumably the record pipeline expects that
            # key name regardless of origin; confirm with the consumer.
            record_item = {
                'timestamp': int(received_count * data_frequency * 1000),
                'isValid': event_scan_start.is_set(),
                "data_filter": sample
            }
            batch.append(ws_item)
            # Record for the BPM-edit interface.
            record_edit_data(sample)
            # Record for the XVP-edit interface.
            record_data(record_item)
            received_count = received_count + 1
        # Send the raw batch over the websocket.
        send_ws_msg(json.dumps({
            "name": "Breathe",
            "breatheVoList": batch
        }))
    elif filter_data is not None:
        for sample in filter_data:
            stamp = int(received_filtered_count * data_frequency * 1000)
            ws_item = {
                'timestamp': stamp,
                'isValid': event_scan_start.is_set(),
                "data_filter": sample,
            }
            batch.append(ws_item)
            received_filtered_count = received_filtered_count + 1
            calc_p_t(ws_item)
        # Send the filtered batch over the websocket.
        send_ws_msg(json.dumps({
            "name": "Breathe",
            "breatheVoList": batch
        }))


# 根据收到的所有滤波数据计算最近3000点的BPM信息，每1000个点更新一次。
def calc_p_t(mapped_item):
    """Forward one filtered sample to the BPM/P-T calculation process.

    Downstream recomputes BPM over the latest 3000 points, refreshing every
    1000 points (per the original module note).
    """
    filter_data_dict_queue.put(mapped_item)


def record_data(mapped_data):
    """Store one mapped sample into the appropriate bounded recording queue.

    While recording is active the sample goes to ``records_queue``; otherwise
    to ``pre_records_queue``. When the target queue is full, the oldest entry
    is evicted first. ``None`` samples are ignored.
    """
    if mapped_data is None:
        return
    target = records_queue if event_start_record.is_set() else pre_records_queue
    # Evict the oldest entry so the queue acts as a ring buffer.
    if target.full():
        target.get()
    target.put(mapped_data)


def record_edit_data(mapped_item):
    """Push one raw sample onto the BPM-edit queue, evicting the oldest
    entry when the bounded queue is already full."""
    if q_edit_BPM.full():
        q_edit_BPM.get()
    q_edit_BPM.put(mapped_item)
