import json
import os
import socket
import struct
import threading
import time
from datetime import datetime

import pandas as pd
import redis
from joblib import load

from db import get_session
from db.tables import Traffic, Requests

# Load the pre-trained detection model and the feature scaler it was trained with.
model = load('backend/model/detect_model_mini.joblib')
scaler = load('backend/model/scaler_mini.joblib')

# Database sessions are created per-use inside the worker functions below.

# Global request counters: incremented by get_post(), reported and reset
# once per second by print_req_num().
total_requests = 0
normal_requests = 0
anomalous_requests = 0
# Create a raw socket that receives all incoming IP packets.
s = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_IP)
s.bind(("127.0.0.1", 8123))  # bind to a specific local address/port
s.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
# NOTE(review): SIO_RCVALL/RCVALL_ON only exist on Windows, so this module
# is Windows-only as written — confirm the deployment target.
s.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)  # enable promiscuous mode on Windows

# Running statistics for the single tracked flow (shared mutable state,
# updated by update_flow_stats() and read by get_post()).
flow_stats = {
    'start_time': None,        # timestamp of the first packet seen
    'end_time': None,          # timestamp of the most recent packet
    'total_packets': 0,
    'total_length': 0,
    'packet_lengths': [],      # per-packet byte lengths (for min/max features)
    'iat_times': [],           # inter-arrival times between consecutive packets
    'last_packet_time': None
}


def update_flow_stats(packet_length, packet_time, stats=None):
    """Record one packet in the flow statistics.

    Args:
        packet_length: Packet size in bytes.
        packet_time: Arrival timestamp in seconds (e.g. from ``time.time()``).
        stats: Statistics dict to update in place. Defaults to the
            module-level ``flow_stats``, so existing callers are unchanged;
            passing an explicit dict makes the function reusable/testable.
    """
    if stats is None:
        stats = flow_stats

    if stats['start_time'] is None:
        stats['start_time'] = packet_time

    stats['end_time'] = packet_time
    stats['total_packets'] += 1
    stats['total_length'] += packet_length
    stats['packet_lengths'].append(packet_length)

    # Inter-arrival time is only defined from the second packet onward.
    if stats['last_packet_time'] is not None:
        stats['iat_times'].append(packet_time - stats['last_packet_time'])

    stats['last_packet_time'] = packet_time


# NOTE(review): looks like leftover debug code — it seeds flow_stats with two
# fake packets at import time, which skews the statistics computed for real
# traffic (and guarantees packet_lengths is non-empty for max()/min() in
# get_post()). Confirm whether this warm-up is intentional before removing.
update_flow_stats(50, time.time())
time.sleep(0.01)
update_flow_stats(60, time.time())


def parse_packet(packet):
    """Decode the IPv4 and TCP headers of one raw packet.

    Args:
        packet: Raw packet bytes starting at the IP header.

    Returns:
        Dict with the source/destination ports, sequence and
        acknowledgement numbers, the TCP header length in bytes, and
        the remaining payload bytes under ``'data'``.
    """
    # IPv4: low nibble of the first byte is the header length in
    # 32-bit words, so the IP header spans ihl * 4 bytes.
    ip_fields = struct.unpack('!BBHHHBBH4s4s', packet[0:20])
    ip_header_len = (ip_fields[0] & 0x0F) * 4

    # The TCP header starts immediately after the IP header.
    tcp_raw = packet[ip_header_len:ip_header_len + 20]
    (src_port, dst_port, seq_num, ack_num,
     offset_byte, _flags, _window, _checksum, _urgent) = struct.unpack('!HHLLBBHHH', tcp_raw)

    # Data offset lives in the high nibble, again counted in 32-bit words.
    tcp_header_len = (offset_byte >> 4) * 4

    return {
        'source_port': src_port,
        'destination_port': dst_port,
        'sequence': seq_num,
        'acknowledgement': ack_num,
        'tcp_header_length': tcp_header_len,
        'data': packet[ip_header_len + tcp_header_len:],
    }


def get_post():
    """Capture packets forever: parse, featurize, classify, and persist.

    For each raw packet received on the module-level socket ``s``: parse
    the TCP header, combine it with the running ``flow_stats``, scale the
    feature row, classify it with the loaded model, store a Traffic row,
    and update the global request counters read by print_req_num().
    This loop never returns.
    """
    global total_requests, normal_requests, anomalous_requests

    # Column names must match what the scaler/model were trained on.
    # (The original built the frame and then re-assigned the identical
    # column list a second time — that redundancy is removed here.)
    feature_columns = [
        'Source Port', 'Destination Port', 'Protocol', 'Flow Duration',
        'Total Fwd Packets', 'Total Length of Fwd Packets',
        'Fwd Packet Length Max', 'Fwd Packet Length Min',
        'Flow IAT Mean', 'Flow IAT Std', 'Flow IAT Max'
    ]

    while True:
        data, _addr = s.recvfrom(65565)
        parsed = parse_packet(data)
        source_port = parsed['source_port']
        destination_port = parsed['destination_port']
        protocol = 6  # IANA protocol number for TCP

        # Flow-level statistics accumulated so far.
        flow_duration = flow_stats['end_time'] - flow_stats['start_time']
        total_fwd_packets = flow_stats['total_packets']
        total_length_of_fwd_packets = flow_stats['total_length']
        fwd_packet_length_max = max(flow_stats['packet_lengths'])
        fwd_packet_length_min = min(flow_stats['packet_lengths'])
        iat = flow_stats['iat_times']
        flow_iat_mean = sum(iat) / len(iat) if iat else 0
        flow_iat_std = (sum((x - flow_iat_mean) ** 2 for x in iat) / len(iat)) ** 0.5 if iat else 0
        flow_iat_max = max(iat) if iat else 0

        # Build a single-row feature frame in the training column order.
        df = pd.DataFrame([[
            source_port, destination_port, protocol, flow_duration,
            total_fwd_packets, total_length_of_fwd_packets,
            fwd_packet_length_max, fwd_packet_length_min,
            flow_iat_mean, flow_iat_std, flow_iat_max
        ]], columns=feature_columns)

        # Standardize, then restore the DataFrame wrapper for the model.
        df_scaled = pd.DataFrame(scaler.transform(df), columns=feature_columns)

        # Model output: 1 = anomalous, anything else = normal.
        label = 'Anomalous' if model.predict(df_scaled)[0] == 1 else 'Normal'

        # Treat the raw packet length as this packet's traffic volume.
        total_traffic = len(data)

        traffic_data = Traffic(
            timestamp=datetime.now(),
            source_port=source_port,
            destination_port=destination_port,
            protocol=protocol,
            flow_duration=flow_duration,
            total_fwd_packets=total_fwd_packets,
            total_length_of_fwd_packets=total_length_of_fwd_packets,
            fwd_packet_length_max=fwd_packet_length_max,
            fwd_packet_length_min=fwd_packet_length_min,
            flow_iat_mean=flow_iat_mean,
            flow_iat_std=flow_iat_std,
            flow_iat_max=flow_iat_max,
            total_traffic=total_traffic,
            anomalous_traffic=total_traffic if label == 'Anomalous' else 0,
            normal_traffic=total_traffic if label == 'Normal' else 0,
            label=label
        )

        total_requests += 1  # one packet = one request
        if label == 'Normal':
            normal_requests += 1
        elif label == 'Anomalous':
            anomalous_requests += 1

        # Open/commit/close the session per packet so failures don't leak
        # connections (the original never closed the session it opened).
        session = get_session()
        try:
            session.add(traffic_data)
            session.commit()
        finally:
            session.close()


def print_req_num():
    """Report and persist the per-interval request counters, then reset them.

    Prints the counts, writes a Requests row to the database, mirrors the
    snapshot to Redis as JSON keyed by the current wall-clock second, and
    zeroes the global counters. Called once per second by schedule_print().
    """
    global total_requests, normal_requests, anomalous_requests

    print(f"Total requests: {total_requests}")
    print(f"Normal requests: {normal_requests}")
    print(f"Anomalous requests: {anomalous_requests}")

    # Persist this interval's counts; close the session even on failure
    # (the original leaked one DB session per call, i.e. per second).
    session = get_session()
    try:
        new_request = Requests(
            total_requests=total_requests,
            normal_requests=normal_requests,
            anomalous_requests=anomalous_requests
        )
        session.add(new_request)
        session.commit()
    finally:
        session.close()

    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    # Snapshot to mirror into Redis.
    data = {
        "Total": total_requests,
        "Normal": normal_requests,
        "Anomalous": anomalous_requests
    }

    # Close the connection after use — the original opened a fresh Redis
    # client every second without ever releasing it.
    redis_client = redis.Redis(host='localhost', port=6379, db=2, decode_responses=True)
    try:
        redis_client.set(current_time, json.dumps(data))
    finally:
        redis_client.close()

    # Reset the per-interval counters.
    # NOTE(review): get_post() increments these from another thread; this
    # read-then-reset is not atomic, so increments landing between the
    # snapshot and the reset are lost — confirm that is acceptable.
    total_requests = 0
    normal_requests = 0
    anomalous_requests = 0


def schedule_print():
    """Re-arm a one-second timer for the next run, then report the counters."""
    next_tick = threading.Timer(1.0, schedule_print)
    next_tick.start()
    print_req_num()


def main():
    """Entry point: start the periodic stats reporter, then capture packets.

    get_post() loops forever, so this call never returns.
    """
    schedule_print()
    get_post()


def get_file_info(file_path):
    """Return the size and line count of a file, best-effort.

    Args:
        file_path: Path of the file to inspect.

    Returns:
        Tuple ``(size_mb, line_count)`` where ``size_mb`` is the file
        size in megabytes rounded to two decimals. Returns ``(0, 0)``
        when the file is missing or unreadable.
    """
    try:
        size = os.path.getsize(file_path) / (1024 * 1024)  # bytes -> MB
        with open(file_path, 'r', encoding='utf-8') as file:
            count = sum(1 for _ in file)
        return round(size, 2), count
    except (OSError, UnicodeDecodeError):
        # Narrowed from a blanket `except Exception`, which also hid
        # programming errors; I/O and decode failures are the expected
        # cases, and the (0, 0) best-effort contract is preserved.
        return 0, 0


def get_dataset_info():
    """Summarize size/row-count info for the dataset and model files.

    Builds a dict keyed by bare file name (CSV extension stripped for
    datasets), writes it to dataset_info.json, and returns it.
    """
    base_path = "backend/dataset/"  # datasets are expected under this directory
    files = [
        "LDAP.csv", "MSSQL.csv", "NetBIOS.csv", "Portmap.csv",
        "Syn.csv", "UDP.csv", "UDPLag.csv", "merged/all_data.csv",
        "merged/all_small.csv", "merged/test.csv", "merged/test_mini.csv",
        "merged/train.csv", "merged/train_mini.csv"
    ]
    model_files = [
        "model/detect_model.joblib", "model/detect_model_mini.joblib"
    ]

    dataset_info = {}

    # Datasets: record both size and line count.
    for rel_path in files:
        size_mb, rows = get_file_info(os.path.join(base_path, rel_path))
        key = rel_path.split('/')[-1].replace('.csv', '')
        dataset_info[key] = {"大小": f"{size_mb} MB", "数量": rows}

    # Model files: only the size is meaningful.
    for rel_path in model_files:
        size_mb, _ = get_file_info(os.path.join("backend", rel_path))
        dataset_info[rel_path.split('/')[-1]] = {"大小": f"{size_mb} MB"}

    # Persist the summary as UTF-8 JSON next to the script.
    with open("dataset_info.json", "w", encoding="utf-8") as f:
        json.dump(dataset_info, f, ensure_ascii=False, indent=4)

    return dataset_info


if __name__ == '__main__':
    # Delegate to main() instead of duplicating its body here, so the
    # startup sequence is defined in exactly one place (main() was
    # previously dead code).
    main()
