import logging
import multiprocessing
import queue
from multiprocessing import Process

import numpy as np

import calc_breath_info
import config
import socket_client
import var
from DataProcess import data_process
from config import min_filter_frequency_default, max_filter_frequency_default, data_frequency
from filter import do_filter, smooth_data
from utils.helper import Filter_helper
from var import q_recv_data, websocket_queue, event_start_record, records_queue, q_edit_BPM, \
    event_connect_start, queue_send_to_server, event_socket_connected, filter_data_dict_queue, pre_records_queue

pre_recent_data = []
logger = logging.getLogger("uvicorn")
logger.setLevel(logging.INFO)


def init_log():
    # 配置日志记录
    global logger
    handler = logging.FileHandler("py-breath-gather.log")  # 指定日志文件路径
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.addHandler(logging.StreamHandler())


def init():
    init_log()
    manager = multiprocessing.Manager()
    var.share_dict = manager.dict()
    var.share_dict['q_breath_info'] = {}
    var.share_dict['filter_low'] = min_filter_frequency_default
    var.share_dict['filter_hi'] = max_filter_frequency_default
    var.share_dict['p_t_min_time_interval'] = config.min_time_interval
    # 数据处理进程
    Process(target=data_process, daemon=True,
            args=(
                q_recv_data, websocket_queue, event_start_record,
                records_queue, var.share_dict,
                q_edit_BPM, event_connect_start, var.event_scan_start, var.event_open_cmd_send,
                filter_data_dict_queue, pre_records_queue
            )).start()
    # socket client
    Process(target=socket_client.socket_client_start, daemon=True, args=(
        queue_send_to_server, q_recv_data, event_connect_start, event_socket_connected, var.event_open_cmd_send
    )).start()
    p_calc_breath_info = calc_breath_info.Process_breath_info(websocket_queue, filter_data_dict_queue, var.share_dict)
    p_calc_breath_info.daemon = True
    p_calc_breath_info.start()


def send_open_socket_server_cmd():
    queue_send_to_server.put(bytes(list([0x00, 0x01, 0x00, 0x00])))
    print('发出开启命令')


def send_close_socket_server_cmd():
    queue_send_to_server.put(bytes(list([0x00, 0x00, 0x00, 0x00])))
    print('发出关闭命令')


def send_set_amplify_cmd(val):
    amplify = int(val)
    queue_send_to_server.put(bytes(list([0x00, 0x02, amplify, 0x00])))
    print(f'发出调整增益命令:{amplify}')
    return {
        "code": 1
    }


def close_socket_server():
    send_close_socket_server_cmd()
    return {
        "code": 1
    }


def handle_disconnect():
    # 清除记录中的没有调用结束产生的数据
    if event_start_record.is_set():
        event_start_record.clear()
        clear_record_data()
    # 清除websocket中的未发送数据
    while websocket_queue.qsize() > 0:
        websocket_queue.get()


def handle_edit_data():
    arr = []
    while q_edit_BPM.qsize() > 0:
        item = q_edit_BPM.get()
        arr.append(item)


def handle_recent_data():
    global pre_recent_data
    filter_recent_data_len = 4000
    send_recent_data_len = 3000
    data_arr = []
    filter_helper = Filter_helper()
    while q_edit_BPM.qsize() > 0:
        item = q_edit_BPM.get()
        data_arr.append(item)
    logger.info(f'recent接口，从q_edit_BPM队列中获取的数据长度为:{len(data_arr)}')
    # 如果最新得到的最近数据小于4000条，
    if len(data_arr) < filter_recent_data_len:
        need_pad_len = filter_recent_data_len - len(data_arr)
        # 如果之前存下来的数据长度大于需要长度，那么就补全为4000个点，否则就全部塞进去。
        if len(pre_recent_data) >= need_pad_len:
            data_arr = data_arr + pre_recent_data[-need_pad_len:]
        else:
            data_arr = data_arr + pre_recent_data
    pre_recent_data = data_arr
    filter_params_dic = {
        'low': var.share_dict['filter_low'],
        'hi': var.share_dict['filter_hi']
    }
    # 滤波后的数组
    filter_arr = do_filter(data_arr, filter_params_dic)
    filter_arr = np.around(filter_arr)
    filter_arr = filter_arr[-send_recent_data_len:]
    ans_arr = []
    for index, item in enumerate(filter_arr):
        timestamp = int(index * data_frequency * 1000)
        json_filter_item = {
            'timestamp': timestamp,
            'isValid': False,
            "data_filter": item,
        }
        ans_arr.append(json_filter_item)
        if index == 0 or index == len(filter_arr) - 1:
            continue
        point_type = filter_helper.check_suitable_p_or_t(index, filter_arr)
        if point_type != '':
            json_filter_item['mark'] = point_type
    return ans_arr


def clear_record_data():
    while records_queue.qsize() > 0:
        records_queue.get()


def handle_record_data():
    dict_arr = []
    window_width = 5
    filter_helper = Filter_helper()
    # 开始记录前的辅助滤波数据。
    # while pre_records_queue.qsize() > 0:
    #     dict_arr.append(pre_records_queue.get())
    # pre_len = len(dict_arr)
    # logger.info(f'handle_record_data pre_len length: {pre_len}')
    while records_queue.qsize() > 0:
        dict_arr.append(records_queue.get())
    # 纯数据数组
    raw_arr = filter_helper.map_raw_data_arr(dict_arr)
    low = var.share_dict['filter_low']
    hi = var.share_dict['filter_hi']
    filter_params_dic = {
        'low': low,
        'hi': hi
    }
    logger.info(f'record_data filter params: {low},{hi}')
    # 滤波后的滤波数据。
    mapped_arr = do_filter(raw_arr, filter_params_dic)
    # 四舍五入后的滤波数据。
    data_arr = smooth_data(mapped_arr, window_width)
    data_arr = np.around(data_arr)

    for index, item in enumerate(data_arr):
        if index == 0 or index == len(data_arr) - 1:
            continue
        mapped_item = dict_arr[index]
        mapped_item['data_filter'] = item
        # 用滤波后的数据：index、data_arr 去检查是否有符合要求的波峰波谷，如果有，那么修改原来的dict
        point_type = filter_helper.check_suitable_p_or_t(index, data_arr)
        if point_type != '':
            mapped_item['type'] = point_type
    # dict_arr = dict_arr[(pre_len - window_width - 1):]
    return dict_arr
