import multiprocessing
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
from datetime import datetime, timedelta

import pandas as pd
from xbase_util.add_column_util import handle_dns
from xbase_util.es_db_util import EsDb
from xbase_util.handle_features_util import handle_uri, handle_ua
from xbase_util.xbase_util import build_es_expression, extract_session_fields

from global_ver import es_req, global_geo_util, base_config
from src.util.common_util import split_data_by_chunk, split_data_by_num
from src.util.dangerous_util import DangerousUtil
from src.util.package import PacketUtil

if __name__ == '__main__':
    scene = input("场景:")
    manager = multiprocessing.Manager()
    # Read the Arkime session ids for this scene. `with` guarantees the file
    # handle is closed (the original leaked it), and blank lines — e.g. the
    # trailing newline — are dropped so we never issue an empty `id==` query.
    with open(f"{scene}_ids.txt") as file:
        ids = [line for line in file.read().split("\n") if line.strip()]
    session_list = []
    # Fixed query window for this extraction run.
    start_time = datetime.strptime("2024-12-17 09:00:00", "%Y-%m-%d %H:%M:%S")
    end_time = datetime.strptime("2024-12-17 09:30:00", "%Y-%m-%d %H:%M:%S")
    def thread_method(arkime_id, start_time, end_time):
        # Fetch the raw ES hits for one Arkime session id inside the window.
        expression = build_es_expression(1, f"id=={arkime_id}", start_time, end_time, bounded_type="last")
        response = es_req.search(expression, "arkime_sessions3*")
        return response.json()['hits']['hits']
    # Fan the per-id ES lookups out over a thread pool (I/O bound), then
    # collect results in submission order, printing progress per future.
    with ThreadPoolExecutor(max_workers=100) as executor:
        futures = [executor.submit(thread_method, arkime_id, start_time, end_time)
                   for arkime_id in ids]
        total = len(futures)
        for done, future in enumerate(futures, start=1):
            session_list.extend(future.result())
            print(f"进度:{done}/{total},已获取:{len(session_list)}")
    print(f"共获取:{len(session_list)}")
    # Flatten the raw ES hits into record dicts, then enrich the feature
    # columns (URI, User-Agent, DNS) before the pcap extraction stage.
    # NOTE: the original assigned `time = datetime.now()` here; the value was
    # never read, so the dead local has been removed.
    data_list = extract_session_fields(session_list, global_geo_util)
    df = handle_uri(pd.DataFrame(data_list), use_tqdm=False)
    df = handle_ua(df, use_tqdm=False)
    df = handle_dns(df, isDataFrame=True, use_tqdm=False)
    data_list = df.to_dict(orient='records')
    global_esDb = EsDb(es_req, manager)
    final_list = []
    # Fan-out sizing: worker processes, and pcap threads inside each process.
    config_processor_count = 10
    config_pcap_thread_in_process = 50
    chunks = split_data_by_num(data_list, config_processor_count)
    # Extract pcap data for each chunk in parallel worker processes
    # (CPU/IO heavy), merging per-chunk results into one flat list.
    with ProcessPoolExecutor(max_workers=config_processor_count) as executor:
        pending = [
            executor.submit(
                PacketUtil.package_session_process,
                config_pcap_thread_in_process,
                chunk,
                base_config['path_prefix'],
                global_esDb,
            )
            for chunk in chunks
        ]
        for item in pending:
            final_list.extend(item.result())
    # Normalise column dtypes: ips/ports become strings (they are compared
    # against stringified threat records below), packet timestamps become
    # epoch-millisecond ints.
    session_list = pd.DataFrame(final_list).fillna('')
    for str_col in ('source.ip', 'source.port', 'destination.ip', 'destination.port'):
        session_list[str_col] = session_list[str_col].astype(str)
    for int_col in ('firstPacket', 'lastPacket'):
        session_list[int_col] = session_list[int_col].astype(int)

    def _to_shanghai(series):
        # epoch ms -> timezone-aware timestamps in Asia/Shanghai
        return pd.to_datetime(series, unit='ms').dt.tz_localize('UTC').dt.tz_convert('Asia/Shanghai')

    extra = pd.DataFrame({
        'firstPacket_datetime': _to_shanghai(session_list['firstPacket']),
        'lastPacket_datetime': _to_shanghai(session_list['lastPacket']),
        'isDangerous': False,
    })
    session_list = pd.concat([session_list, extra], axis=1)
    # Empty label columns, filled in by the tagging loop below.
    for label_col in ('PROTOCOL', 'DENY_METHOD', 'THREAT_SUMMARY', 'SEVERITY'):
        session_list[label_col] = ''
    session_list.to_csv("final.csv", index=False)
    print(f"[打标签]开始获取异常数据库")
    # Match-window half-width (minutes) around each threat timestamp.
    duration = base_config['label_duration']
    # Threat/IDS records for the same window; every row is tagged 'ids'.
    dangerous_all = DangerousUtil.get_dangerous_ips(start_time, end_time, base_config)
    dangerous_all['type'] = 'ids'
    print(f"[打标签]异常数量{len(dangerous_all)}")
    if len(dangerous_all) == 0:
        # Nothing to match against: drop the helper datetime columns and
        # leave every session unlabeled.
        print("[打标签]异常数量为空，打标签结束")
        session_list.drop(columns=['firstPacket_datetime', 'lastPacket_datetime'], inplace=True)
    else:
        # Normalise the threat records so ip/port comparisons are
        # string-vs-string and THREAT_TIME is a real timestamp.
        dangerous_all.fillna('', inplace=True)
        for col in ('SIP', 'S_PORT', 'DIP', 'D_PORT'):
            dangerous_all[col] = dangerous_all[col].astype(str)
        dangerous_all['THREAT_TIME'] = pd.to_datetime(dangerous_all['THREAT_TIME'],
                                                      format='%Y-%m-%d %H:%M:%S')

        count = 0
        for ind, (i, d_item) in enumerate(dangerous_all.iterrows()):
            print(f"[打标签]进度：{ind + 1}/{len(dangerous_all)}")
            # For 'ids' records match on the raw source ip; otherwise use the
            # first X-Forwarded-For hop (empty string when absent).
            if d_item['type'] == 'ids':
                sip = session_list['source.ip']
            else:
                sip = session_list['http.xffIp'].apply(
                    lambda x: x[0] if isinstance(x, list) and len(x) > 0 else "")
            # Sessions whose 4-tuple matches and whose packet range lies
            # within ±duration minutes of the threat time get labelled.
            threat_time = d_item['THREAT_TIME'].tz_localize('Asia/Shanghai')
            threat_start_time = threat_time - timedelta(minutes=duration)
            threat_end_time = threat_time + timedelta(minutes=duration)
            result_session = session_list[(
                    (sip == d_item["SIP"]) &
                    (session_list['source.port'] == d_item["S_PORT"]) &
                    (session_list['destination.ip'] == d_item["DIP"]) &
                    (session_list['destination.port'] == d_item["D_PORT"]) &
                    (session_list['firstPacket_datetime'] >= threat_start_time) &
                    (session_list['lastPacket_datetime'] <= threat_end_time)
            )]
            count += len(result_session)
            if len(result_session) != 0:
                print(
                    f'[打标签]匹配到：{len(result_session)},共匹配到：{count}')
                # BUGFIX: the value list was misaligned with the column list —
                # 'THREAT_SUMMARY' received d_item['type'] and 'traffic_type'
                # received d_item['THREAT_SUMMARY']. Values now follow the
                # column order exactly.
                session_list.loc[session_list['id'].isin(result_session['id']), [
                    'DENY_METHOD', 'PROTOCOL', 'SEVERITY', 'THREAT_SUMMARY', 'traffic_type',
                    'isDangerous']] = [
                    d_item['DENY_METHOD'],
                    d_item['PROTOCOL'],
                    d_item['SEVERITY'],
                    d_item['THREAT_SUMMARY'],
                    d_item['type'],
                    True
                ]
        # Helper datetime columns are only needed for the time-window match.
        session_list.drop(columns=['firstPacket_datetime', 'lastPacket_datetime'], inplace=True,
                          errors='ignore')

    print(f"{len(session_list)}条")
    # Persist the labelled sessions, excluding the bulky pcap_flow_text column.
    export_columns = [c for c in session_list.columns if c != 'pcap_flow_text']
    session_list.to_csv(f"{scene}_ids.csv", index=False, columns=export_columns)