import glob
import os
import shutil
import traceback
import zipfile
from datetime import timedelta, datetime

import pandas as pd
from xbase_util.common_util import check_path, s2date, date2s
from xbase_util.db.bean import ConfigBean

from base.bean.beans import ProcedureEnum
from base.bean.status_enum import StatusEnum
from base.util.common_util import get_all_threats
from base.util.file_util import gen_catalogue_path
from base.util.redis_util import UpdateStatusParams
from global_ver import out_folder


class LabelUtil:
    """Label captured network sessions against a threat database and export the
    labelled result as split CSV files bundled into a single zip archive."""

    @classmethod
    def zip_csv_files(cls, dir_src, dir_dst, session_start_time, session_end_time, requestCode):
        """Pack every ``*.csv`` in *dir_src* (non-recursive) into one zip under *dir_dst*.

        The archive is named ``{requestCode}_{start}_{end}.zip`` where the two
        timestamps are the session window formatted as ``%Y%m%d%H%M%S``.

        :param dir_src: directory scanned for ``*.csv`` files
        :param dir_dst: directory the archive is written into
        :param session_start_time: session window start, parsed via ``s2date``
        :param session_end_time: session window end, parsed via ``s2date``
        :param requestCode: request identifier used as the archive name prefix
        :return: tuple ``(zip_path, zip_name)``; doubled slashes in the path are collapsed
        """
        start_time = s2date(session_start_time).strftime("%Y%m%d%H%M%S")
        end_time = s2date(session_end_time).strftime("%Y%m%d%H%M%S")
        zip_name = f"{requestCode}_{start_time}_{end_time}.zip"
        zip_path = f"{dir_dst}/{zip_name}"
        with zipfile.ZipFile(zip_path, 'w', compression=zipfile.ZIP_DEFLATED) as zip_file:
            # Entries are stored flat (basename only); the archive keeps no directory tree.
            for csv_file in glob.glob(os.path.join(dir_src, '*.csv')):
                zip_file.write(csv_file, os.path.basename(csv_file))
        return zip_path.replace("//", "/"), zip_name

    @classmethod
    def save_dataframe_to_multiple_files(cls, config: ConfigBean, df, split_number, queue, base_config):
        """Split *df* into chunks of *split_number* rows, write each chunk as a CSV,
        zip the chunks, copy the zip to the backup location and report completion.

        :param config: task configuration (catalogue, session window, id, ...)
        :param df: labelled session DataFrame to export
        :param split_number: maximum number of rows per CSV file
        :param queue: status queue; receives the final ``UpdateStatusParams``
            followed by the literal ``"CALLBACK"`` marker
        :param base_config: global configuration; ``backup_path.newfile`` is the
            directory the finished zip is copied to
        """
        fileList = []
        # Ceiling division: number of CSV chunks needed for len(df) rows.
        num_files = -(-len(df) // split_number)
        label_dir = f"{gen_catalogue_path(config.catalogue)}label/"
        for i in range(num_files):
            path = f"{label_dir}{i + 1}.csv"
            df_subset = df.iloc[i * split_number:(i + 1) * split_number]
            df_subset.to_csv(check_path(path), mode='w',
                             index=False, errors="replace")
            fileList.append({
                "count": len(df_subset),
                "fileCreatedAt": date2s(datetime.now()),
                "fileSize": os.path.getsize(path),
                "fileName": f"{i + 1}.csv"
            })
        zipPath, zipName = cls.zip_csv_files(label_dir, label_dir,
                                             config.session_start_time,
                                             config.session_end_time, requestCode=config.id)

        # Keep a copy of the archive in the backup location, then drop the
        # working output folder entirely.
        shutil.copy(zipPath, base_config["backup_path"]['newfile'])
        shutil.rmtree(out_folder)

        backup_zip = f"{base_config['backup_path']['newfile']}/{zipName}"
        queue.put(UpdateStatusParams(status=StatusEnum.finished.value,
                                     fileList=fileList,
                                     process=1,
                                     zipPath=backup_zip,
                                     zipName=zipName,
                                     zipSize=os.path.getsize(backup_zip)))

        queue.put("CALLBACK")

    @classmethod
    def set_label(cls, config: ConfigBean, base_config, queue) -> bool:
        """Label sessions by matching them against the threat database.

        Sessions read from the task's ``pcap/`` CSV directory are compared to
        threat records by source/destination ip:port within a time window of
        ``config.label_duration`` minutes around each threat time; matching
        sessions are flagged ``isDangerous`` and annotated with the threat's
        attributes. When ``config.label_all_true`` is set, every session is
        flagged dangerous without any matching. The labelled frame is finally
        split, zipped and reported via ``save_dataframe_to_multiple_files``.

        :param config: task configuration
        :param base_config: global configuration (threat DB access, backup path)
        :param queue: status queue for progress/error reporting
        :return: ``True`` on success, ``False`` on error (error already queued)
        """
        print("[打标签]开始打标签")
        queue.put(UpdateStatusParams(status=StatusEnum.processing.value, process=0.6, error=""))
        allTrue = config.label_all_true
        # Half-width (in minutes) of the match window around each threat time.
        duration = config.label_duration
        pcap_dir = f"{gen_catalogue_path(config.catalogue)}pcap/"
        if not os.path.exists(pcap_dir):
            queue.put(UpdateStatusParams(status=StatusEnum.error.value, process=1,
                                         error=f"目录不存在：{pcap_dir}",
                                         execEndTime=date2s(datetime.now())))
            print("[打标签]文件不存在")
            queue.put(None)
            return False
        # Concatenate every per-pcap CSV into one session frame.
        session_list = pd.DataFrame()
        filelist = os.listdir(pcap_dir)
        for ind, pcap_path in enumerate(filelist):
            print(f"[打标签]正在读取{ind + 1}/{len(filelist)}个文件")
            session_list = pd.concat(
                [session_list, pd.read_csv(f"{pcap_dir}{pcap_path}")],
                ignore_index=True)
        unmatch_path = f"{gen_catalogue_path(config.catalogue)}label/label_unmatch.csv"
        print("[打标签]开始，结束后记得启动 arkime capture")
        try:
            session_list = session_list.fillna('')
            # Normalise the match keys to strings so they compare equal to the
            # (string-typed) threat record fields.
            for col in ('source.ip', 'source.port', 'destination.ip', 'destination.port'):
                session_list[col] = session_list[col].astype(str)
            # firstPacket / lastPacket are 13-digit millisecond epoch timestamps.
            session_list['firstPacket'] = session_list['firstPacket'].astype(int)
            session_list['lastPacket'] = session_list['lastPacket'].astype(int)
            # Build the helper columns once and append them with a single concat.
            new_columns = pd.DataFrame({
                'firstPacket_datetime': pd.to_datetime(session_list['firstPacket'], unit='ms').dt.tz_localize(
                    'UTC').dt.tz_convert('Asia/Shanghai'),
                'lastPacket_datetime': pd.to_datetime(session_list['lastPacket'], unit='ms').dt.tz_localize(
                    'UTC').dt.tz_convert('Asia/Shanghai'),
                'isDangerous': False
            })
            session_list = pd.concat([session_list, new_columns], axis=1)
            if allTrue:
                # Shortcut path: every session is labelled dangerous, no matching.
                queue.put(UpdateStatusParams(status=StatusEnum.processing.value,
                                             procedure=ProcedureEnum.label_abnormal.value, process=0.9))
                session_list.drop(columns=['firstPacket_datetime', 'lastPacket_datetime'], inplace=True)
                session_list['isDangerous'] = True
                print("[打标签]打标签结束")
            else:
                queue.put(
                    UpdateStatusParams(status=StatusEnum.processing.value, procedure=ProcedureEnum.label_normal.value,
                                       process=0.8))
                # Threat attribute columns, filled in per matched session.
                # traffic_type is initialised too so the output column set does
                # not depend on whether any threat matched.
                session_list['PROTOCOL'] = ''
                session_list['DENY_METHOD'] = ''
                session_list['THREAT_SUMMARY'] = ''
                session_list['SEVERITY'] = ''
                session_list['traffic_type'] = ''
                start_time = s2date(config.session_start_time)
                end_time = s2date(config.session_end_time)
                print(f"[打标签]开始获取异常数据库")
                dangerous_all = get_all_threats(base_config, end_time, start_time)
                if len(dangerous_all) == 0:
                    print("[打标签]异常数量为空，打标签结束")
                    session_list.drop(columns=['firstPacket_datetime', 'lastPacket_datetime'], inplace=True)
                else:
                    # Normalise the threat records the same way as the sessions.
                    dangerous_all.fillna('', inplace=True)
                    for col in ('SIP', 'S_PORT', 'DIP', 'D_PORT'):
                        dangerous_all[col] = dangerous_all[col].astype(str)
                    dangerous_all['THREAT_TIME'] = pd.to_datetime(dangerous_all['THREAT_TIME'],
                                                                  format='%Y-%m-%d %H:%M:%S')
                    count = 0
                    unmatched_data = []
                    for ind, (i, d_item) in enumerate(dangerous_all.iterrows()):
                        print(f"[打标签]进度：{ind + 1}/{len(dangerous_all)}")
                        if d_item['type'] == 'ids':
                            sip = session_list['source.ip']
                        else:
                            # Non-IDS threats are matched on the first
                            # X-Forwarded-For hop instead of source.ip.
                            sip = session_list['http.xffIp'].apply(
                                lambda x: x[0] if isinstance(x, list) and len(x) > 0 else "")
                        threat_time = d_item['THREAT_TIME'].tz_localize('Asia/Shanghai')
                        threat_start_time = threat_time - timedelta(minutes=duration)
                        threat_end_time = threat_time + timedelta(minutes=duration)
                        result_session = session_list[(
                                (sip == d_item["SIP"]) &
                                (session_list['source.port'] == d_item["S_PORT"]) &
                                (session_list['destination.ip'] == d_item["DIP"]) &
                                (session_list['destination.port'] == d_item["D_PORT"]) &
                                (session_list['firstPacket_datetime'] >= threat_start_time) &
                                (session_list['lastPacket_datetime'] <= threat_end_time)
                        )]
                        count = count + len(result_session)
                        if len(result_session) != 0:
                            print(
                                f'[打标签]匹配到：{len(result_session)},共匹配到：{count}')
                            # FIX: the value list must mirror the column list —
                            # previously d_item['type'] landed in THREAT_SUMMARY
                            # and d_item['THREAT_SUMMARY'] in traffic_type.
                            session_list.loc[session_list['id'].isin(result_session['id']), [
                                'DENY_METHOD', 'PROTOCOL', 'SEVERITY', 'THREAT_SUMMARY', 'traffic_type',
                                'isDangerous']] = [
                                d_item['DENY_METHOD'],
                                d_item['PROTOCOL'],
                                d_item['SEVERITY'],
                                d_item['THREAT_SUMMARY'],
                                d_item['type'],
                                True
                            ]
                        else:
                            if config.label_is_output_unmatch:
                                unmatched_data.append(d_item)
                                print(
                                    f"[打标签]进度：{ind + 1}/{len(dangerous_all)}｜没有匹配到：{d_item['SIP']}:{d_item['S_PORT']}---{d_item['DIP']}:{d_item['D_PORT']}")
                        queue.put(UpdateStatusParams(status=StatusEnum.processing.value,
                                                     procedure=ProcedureEnum.label_normal.value,
                                                     process=0.8))
                    # (allTrue is necessarily False in this branch, so the old
                    # "if not allTrue" guard was redundant and is dropped.)
                    if config.label_is_output_unmatch and len(unmatched_data) > 0:
                        print(f"[打标签]完成，有{len(unmatched_data)}条没有匹配到")
                        pd.DataFrame(unmatched_data).to_csv(
                            check_path(unmatch_path), mode='w',
                            index=False, errors="replace")
                    session_list.drop(columns=['firstPacket_datetime', 'lastPacket_datetime'], inplace=True)
            cls.save_dataframe_to_multiple_files(config, session_list, int(config.splitNumber), queue, base_config)
            return True
        except Exception as e:
            print("[打标签]报错：")
            print(e)
            traceback.print_exc()
            queue.put(UpdateStatusParams(status=StatusEnum.error.value, error=f'{e}', process=1,
                                         execEndTime=date2s(datetime.now())))
            queue.put(None)
            return False
