import datetime
import os
import threading
import traceback
import uuid
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures.process import ProcessPoolExecutor
from multiprocessing import cpu_count, Manager

import pandas as pd
from xbase_util.common_util import extract_session_fields, check_path, build_es_expression, s2date, date2s
from xbase_util.db.bean import ConfigBean
from xbase_util.handle_features_util import handle_uri, handle_ua

from base.bean.beans import ProcedureEnum
from base.bean.status_enum import StatusEnum
from base.util.common_util import get_all_threats
from base.util.file_util import gen_catalogue_path
from base.util.redis_util import UpdateStatusParams
from global_ver import global_geoutil, req


def scroll_slice_method(scroll_exp, config, save_queue):
    """
    Scroll-fetch session documents from Elasticsearch and push every batch
    onto the save queue.

    :param scroll_exp: ES expression for the initial scroll request
    :param config: configuration object (supplies the ``session_alive`` TTL)
    :param save_queue: queue that receives each batch of hits
    :return: ``[]`` when the first request fails or returns nothing,
             otherwise ``None`` once the scroll is exhausted
    """
    keep_alive = str(config.session_alive)
    print("[session]清理滚动id...")
    # Drop any stale scroll contexts before starting a new one.
    print(req.clear_all_scroll())
    try:
        first = req.start_scroll(exp=scroll_exp, scroll=keep_alive,
                                 index="arkime_sessions3-*").json()
        scroll_id = first['_scroll_id']
        batch = first['hits']['hits']
        save_queue.put(batch)
        print(f"保存:{len(batch)}")
        if not batch:
            print("首次没有数据")
            print(first)
            return []
    except Exception:
        print("首次请求报错:")
        traceback.print_exc()
        return []
    while True:
        raw = req.scroll_by_id(scroll_id=scroll_id, scroll=keep_alive)
        try:
            page = raw.json()
            next_id = page['_scroll_id']
            batch = page['hits']['hits']
            if not batch:
                print("获取完毕")
                break
            print("正在获取")
            scroll_id = next_id
            print(f"保存:{len(batch)}")
            save_queue.put(batch)
        except Exception as e:
            # A bad page is logged and the same scroll id is retried.
            print(raw.text)
            traceback.print_exc()
            print(f"[session]查询出错：{e},将继续查询")
            continue


def split_data_by_num_df(data, num_chunks):
    """Split a DataFrame row-wise into ``num_chunks`` pieces.

    The first ``num_chunks - 1`` pieces each hold ``len(data) // num_chunks``
    rows; the final piece absorbs any remainder.
    """
    step = len(data) // num_chunks
    # Slice boundaries: fixed-size steps, with the last bound pinned to the end.
    bounds = [i * step for i in range(num_chunks)] + [len(data)]
    return [data.iloc[lo:hi] for lo, hi in zip(bounds, bounds[1:])]


def save_session_to_file(origin_list, session_id_file, geoUtil):
    """
    Flatten a batch of raw session hits and append them to a CSV file.

    :param origin_list: raw session documents fetched from Elasticsearch
    :param session_id_file: target CSV path (directories created by check_path)
    :param geoUtil: helper used to resolve geo-location fields
    :return: None
    """
    print("准备写入")
    started = datetime.datetime.now()
    rows = extract_session_fields(origin_list, geoUtil)
    print(f"写入前处理数据:{datetime.datetime.now() - started}")
    # Emit the CSV header only when appending to a new or empty file.
    need_header = (not os.path.isfile(check_path(session_id_file))
                   or os.path.getsize(session_id_file) == 0)
    frame = handle_ua(handle_uri(pd.DataFrame(rows), use_tqdm=False), use_tqdm=False)
    frame.to_csv(check_path(session_id_file), mode='a', header=need_header, index=False)
    print(f"写入:{len(origin_list)}条，耗时：{datetime.datetime.now() - started}")


def process_method(process_index, chunks, queue):
    """Fan one DataFrame chunk out to a thread pool, one row per task,
    and gather all matched session hits into a single list."""
    print(f"开启进程{process_index + 1},{len(chunks)}")
    collected = []
    with ThreadPoolExecutor(max_workers=10) as pool:
        pending = [
            pool.submit(process_chunk, idx, len(chunks), row_item, queue, process_index)
            for idx, row_item in enumerate(chunks.iterrows())
        ]
        # Drain futures in submission order; each yields a list of hits.
        for task in pending:
            collected.extend(task.result())
    return collected


def process_chunk(index, total, traffic1, queue, process_index):
    """
    Find the Arkime sessions that match a single threat record.

    :param index: position of this record within the chunk (for logging)
    :param total: chunk size (for logging)
    :param traffic1: ``(label, row)`` pair produced by ``DataFrame.iterrows()``
    :param queue: status queue used to report failures
    :param process_index: worker-process index (for logging)
    :return: list of matched hits tagged with the threat's metadata,
             or ``[]`` on error
    """
    print(f"进程:{process_index}的第{index}/{total}个数据")
    try:
        row = traffic1[1]
        raw_time = row['THREAT_TIME']
        if isinstance(raw_time, str):
            time = row['THREAT_TIME'] = datetime.datetime.strptime(raw_time, '%Y-%m-%d %H:%M:%S')
        else:
            time = raw_time.to_pydatetime()
        arkime_expression = f"(ip.src == {row['SIP']}||ip.xff == {row['SIP']}) && port.src == {int(row['S_PORT'])} && ip.dst == {row['DIP']} && port.dst == {int(row['D_PORT'])}"
        matches = search_with_cross_minutes(time, arkime_expression, initial_minutes=5)
        # Copy the threat's metadata onto every matched session document.
        tags = {
            'traffic_type': row['type'],
            'PROTOCOL': row['PROTOCOL'],
            'DENY_METHOD': row['DENY_METHOD'],
            'THREAT_SUMMARY': row['THREAT_SUMMARY'],
            'SEVERITY': row['SEVERITY'],
        }
        for hit in matches:
            hit.update(tags)
        return matches
    except Exception as e:
        traceback.print_exc()
        print(e)
        queue.put(UpdateStatusParams(status=StatusEnum.error.value,
                                     procedure=ProcedureEnum.session_abnormal.value,
                                     error=f"{e}", remark="session错误"
                                     ))
        return []


def save_session(queue, config):
    """
    Consumer loop: drain session batches from ``queue`` and flush them to
    CSV files in chunks of ``config.splitNumber`` rows.

    A ``None`` message is the sentinel that ends the loop; any remaining
    buffered rows are flushed before returning.

    :param queue: queue producing lists of session dicts (``None`` to stop)
    :param config: configuration (``splitNumber``, ``catalogue``)
    :return: None
    """
    results = []
    while True:
        try:
            msg = queue.get()
            if msg is None:
                # Sentinel: flush whatever is left, then stop consuming.
                if len(results) > 0:
                    print("保存最后一次session")
                    save_session_to_file(origin_list=results,
                                         session_id_file=f"{gen_catalogue_path(config.catalogue)}session/{uuid.uuid4()}.csv",
                                         geoUtil=global_geoutil)
                break
            results.extend(msg)
            # Hoisted: the original re-converted config.splitNumber per slice.
            batch_size = int(config.splitNumber)
            # Flush full batches as soon as enough rows have accumulated.
            # (The original also had an unreachable `else: continue` branch —
            # msg is either None or not — which has been removed.)
            while len(results) >= batch_size:
                print("分阶段保存session")
                save_session_to_file(origin_list=results[:batch_size],
                                     session_id_file=f"{gen_catalogue_path(config.catalogue)}session/{uuid.uuid4()}.csv",
                                     geoUtil=global_geoutil)
                results = results[batch_size:]
        except Exception as e:
            # Best-effort: a failed save must not kill the consumer thread.
            print(e)
            continue


def search_with_cross_minutes(time, arkime_expression, initial_minutes=1):
    """
    Search Arkime sessions in a window around ``time``, widening the window
    (half-width 1 -> 5 -> 8 minutes) until something matches.

    :param time: centre of the search window (datetime)
    :param arkime_expression: Arkime filter expression
    :param initial_minutes: initial half-width of the window in minutes
    :return: the matched hits, or ``[]`` when nothing matched at any width
    """
    cross_minutes = initial_minutes
    while cross_minutes <= 8:
        exp = build_es_expression(
            size=100,
            start_time=time - datetime.timedelta(minutes=cross_minutes),
            end_time=time + datetime.timedelta(minutes=cross_minutes),
            arkime_expression=arkime_expression
        )
        response = req.search(body=exp).json()
        try:
            if response['hits']['total']['value'] >= 1:
                return response['hits']['hits']
        except Exception:
            # Malformed response (e.g. an ES error payload): log it and fall
            # through to widen the window. The original looped forever here
            # because cross_minutes was only advanced on the success path.
            print("报错")
            print(response)
        # Escalate the half-width 1 -> 5 -> 8; any other width gives up
        # after one attempt (same as the original's `else: break`).
        if cross_minutes == 1:
            cross_minutes = 5
        elif cross_minutes == 5:
            cross_minutes = 8
        else:
            break
    return []


class SessionUtil:
    """Entry points for collecting Arkime session data.

    Drives either a full scroll export (``get_session``) or a targeted
    lookup of sessions matching known threats
    (``get_all_dangerous_session``), feeding results to a saver thread
    via a shared queue.
    """

    @classmethod
    def get_session(cls, config: ConfigBean, save_queue, que) -> bool:
        """Scroll every session in the configured time range into
        ``save_queue``; pushes a ``None`` sentinel when done either way.

        :return: True on success, False when the scroll setup failed
        """
        try:
            exp = build_es_expression(size=config.session_size,
                                      start_time=s2date(config.session_start_time),
                                      end_time=s2date(config.session_end_time),
                                      arkime_expression=config.session_expression)
            scroll_slice_method(exp, config, save_queue)
            save_queue.put(None)
            print("[session]获取session完毕，记得启动 arkime capture")
            return True
        except Exception as e:
            print(f"[session]错误：{e}")
            traceback.print_exc()
            que.put(UpdateStatusParams(status=StatusEnum.error.value,
                                       procedure=ProcedureEnum.session_normal.value,
                                       error=f"{e}", process=0.3,
                                       execEndTime=date2s(datetime.datetime.now())))
            # Still unblock the saver thread on failure.
            save_queue.put(None)
            return False

    @staticmethod
    def split_list_into_chunks(lst, num_chunks):
        """Split ``lst`` into ``num_chunks`` near-equal slices using
        floating-point stepping (piece sizes differ by at most one)."""
        step = len(lst) / float(num_chunks)
        pieces = []
        cursor = 0.0
        while cursor < len(lst):
            pieces.append(lst[int(cursor):int(cursor + step)])
            cursor += step
        return pieces

    @classmethod
    def get_all_dangerous_session(cls, config: ConfigBean, base_config, que, save_queue) -> bool:
        """
        Fetch the sessions matching every known threat in the configured
        time range, using one worker process per CPU.

        :param config: run configuration (time range, output paths)
        :param base_config: base configuration passed to the threat source
        :param que: status queue for progress/error reporting
        :param save_queue: queue feeding the saver thread
        :return: True (errors are reported per-row via ``que``)
        """
        print("[获取异常session]开始")
        start_time = s2date(config.session_start_time)
        end_time = s2date(config.session_end_time)
        que.put(UpdateStatusParams(status=StatusEnum.processing.value,
                                   procedure=ProcedureEnum.session_abnormal_get_dangerous.value))
        print(f"[获取异常session]时间范围：{start_time}----{end_time}")
        threats = get_all_threats(base_config, end_time, start_time)
        chunks = split_data_by_num_df(threats, cpu_count())
        with ProcessPoolExecutor() as pool:
            # Empty chunks are skipped but keep their original index.
            submitted = [pool.submit(process_method, idx, part, que)
                         for idx, part in enumerate(chunks) if len(part) != 0]
            for fut in submitted:
                hits = fut.result()
                if len(hits) != 0:
                    print(f"保存异常session:{len(hits)}")
                    save_queue.put(hits)
        print("[获取异常session]获取完毕")
        save_queue.put(None)
        que.put(UpdateStatusParams(status=StatusEnum.processing.value, procedure=ProcedureEnum.session_abnormal.value,
                                   process=0.3))
        return True

    @classmethod
    def handle_session(cls, config: ConfigBean, base_config, queue, geoUtil1) -> bool:
        """Start the saver thread, run the configured collection mode, and
        join the saver before returning the collector's result."""
        save_queue = Manager().Queue()
        saver = threading.Thread(target=save_session, args=(save_queue, config))
        saver.start()
        if config.session_all_true:
            queue.put(
                UpdateStatusParams(status=StatusEnum.processing.value, procedure=ProcedureEnum.session_abnormal.value,
                                   process=0))
            outcome = cls.get_all_dangerous_session(config=config, base_config=base_config, que=queue,
                                                    save_queue=save_queue)
        else:
            queue.put(
                UpdateStatusParams(status=StatusEnum.processing.value, procedure=ProcedureEnum.session_normal.value,
                                   process=0))
            outcome = cls.get_session(config=config, save_queue=save_queue, que=queue)
        saver.join()
        return outcome
