#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import time
import json
import numpy as np
import threading
import queue
import requests
import logging
from datetime import datetime
import torch
import torch.nn as nn
from sklearn.preprocessing import StandardScaler

# Root logging configuration for the edge process; module loggers inherit it.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('EdgeDetector')

# Static runtime configuration for the detector.
CONFIG = {
    'cloud_api_url': 'http://cloud-server:55010/api/can/anomalies',  # anomaly ingest endpoint
    'model_path': 'lstm_model.pth',       # LSTM checkpoint (state_dict) on local disk
    'buffer_size': 100,                   # max anomalies queued while awaiting upload
    'detection_threshold': 0.7,           # softmax probability above which a frame is flagged
    'vehicle_id': 'VIN12345',             # identifier attached to every uploaded record
    'upload_interval': 5,                 # seconds between cloud upload cycles
}

# Bounded, thread-safe hand-off queue between the detection loop (producer)
# and the cloud upload thread (consumer).
anomaly_buffer = queue.Queue(maxsize=CONFIG['buffer_size'])


class LSTMClassifier(nn.Module):
    """Sequence classifier: stacked LSTM followed by a linear read-out.

    The class prediction is taken from the hidden state of the final
    timestep only.
    """

    def __init__(self, input_dim, hidden_dim, output_dim, num_layers, dropout=0.2):
        super().__init__()
        # Attribute names must stay 'lstm'/'fc' so saved state_dicts load.
        self.lstm = nn.LSTM(input_dim, hidden_dim, num_layers,
                            batch_first=True, dropout=dropout)
        self.fc = nn.Linear(hidden_dim, output_dim)

    def forward(self, x):
        # x: presumably (batch, seq_len, input_dim) — batch_first=True.
        seq_out, _ = self.lstm(x)
        last_step = seq_out[:, -1, :]
        return self.fc(last_step)


def load_model(model_path, input_dim=8, hidden_dim=64, output_dim=2, num_layers=2):
    """Load the LSTM anomaly classifier from a checkpoint file.

    Args:
        model_path: Path to a state_dict checkpoint produced by torch.save().
        input_dim: Feature count per timestep (8 CAN payload bytes).
        hidden_dim: LSTM hidden size; default matches the shipped checkpoint.
        output_dim: Number of classes (normal / anomaly).
        num_layers: Number of stacked LSTM layers.

    Returns:
        The model in eval mode, or None if loading fails for any reason.
    """
    try:
        model = LSTMClassifier(input_dim, hidden_dim, output_dim, num_layers)
        # map_location='cpu': edge devices typically have no GPU, and a
        # checkpoint saved on a CUDA machine would otherwise fail to load.
        model.load_state_dict(torch.load(model_path, map_location='cpu'))
        model.eval()  # disable dropout for deterministic inference

        logger.info("Model loaded successfully")
        return model
    except Exception as e:
        # Caller treats None as a fatal startup condition (see main()).
        logger.error(f"Model loading failed: {e}")
        return None


def extract_features(can_msg):
    """Return the 8 CAN payload bytes DATA_0..DATA_7 as a float32 vector.

    Missing payload keys default to 0.
    """
    data = [can_msg.get(f'DATA_{i}', 0) for i in range(8)]
    return np.array(data, dtype=np.float32)


def predict_anomaly(model, can_msg, scaler=None, threshold=None):
    """Score a single CAN message with the classifier.

    Args:
        model: Module mapping a (batch, seq, 8) float tensor to (batch, 2) logits.
        can_msg: Dict containing DATA_0..DATA_7 byte values.
        scaler: Optional PRE-FITTED scaler (e.g. sklearn StandardScaler) whose
            transform() is applied to the feature row. Must have been fitted
            on training data; when None, raw feature values are used.
        threshold: Anomaly-probability cut-off; defaults to
            CONFIG['detection_threshold'].

    Returns:
        (is_anomaly, confidence) where confidence is the softmax probability
        of the anomaly class (index 1).
    """
    if threshold is None:
        threshold = CONFIG['detection_threshold']

    features = extract_features(can_msg).reshape(1, -1)

    # BUG FIX: the original code fit a brand-new StandardScaler on this one
    # message. With a single sample, fit_transform maps every feature to 0,
    # so the model received an identical all-zero input for every message and
    # could never discriminate. Scaling is only meaningful with a scaler
    # fitted on training data, so accept one optionally instead.
    if scaler is not None:
        features = scaler.transform(features)

    # Shape (1, 1, 8): batch of one, sequence length one.
    features_tensor = torch.tensor(features, dtype=torch.float32).unsqueeze(1)

    with torch.no_grad():
        logits = model(features_tensor)
        probabilities = torch.softmax(logits, dim=1)
        confidence = probabilities[0, 1].item()

    return confidence > threshold, confidence


def process_can_message(model, can_msg):
    """Classify one CAN frame and enqueue it for cloud upload if anomalous.

    Args:
        model: Trained classifier accepted by predict_anomaly().
        can_msg: Dict with at least 'can_id' and DATA_0..DATA_7 byte values.

    Returns:
        True if the frame looks benign (allow), False if it was flagged as an
        anomaly or if processing failed (fail closed).
    """
    try:
        is_anomaly, confidence = predict_anomaly(model, can_msg)

        if not is_anomaly:
            return True

        logger.warning(
            f"Anomaly detected! CAN ID: {can_msg['can_id']}, Confidence: {confidence:.4f}")

        anomaly_data = {
            'can_id': can_msg['can_id'],
            'timestamp': int(time.time()),
            'data': [can_msg.get(f'DATA_{i}', 0) for i in range(8)],
            'vehicle_id': CONFIG['vehicle_id'],
            'confidence': confidence,
            'is_anomaly': True
        }

        try:
            anomaly_buffer.put_nowait(anomaly_data)
        except queue.Full:
            logger.error(
                "Anomaly buffer is full, discarding the oldest message")
            # BUG FIX: the original used blocking get()/put() here. If the
            # upload thread drained the queue between our Full exception and
            # the get(), the detection loop would block forever. Use the
            # non-blocking variants and tolerate both races.
            try:
                anomaly_buffer.get_nowait()
            except queue.Empty:
                pass
            try:
                anomaly_buffer.put_nowait(anomaly_data)
            except queue.Full:
                logger.error("Anomaly buffer still full, dropping message")

        return False
    except Exception as e:
        # Fail closed: an unprocessable frame is reported as blocked.
        logger.error(f"Error processing CAN message: {e}")
        return False


def cloud_upload_thread():
    """Daemon loop: periodically drain anomaly_buffer and POST each record
    to the cloud API.

    Runs forever; started as a daemon thread from main(). Records that fail
    to upload are re-queued so they are retried on the next cycle.
    """
    while True:
        try:
            # Drain everything currently queued into a local batch.
            # NOTE(review): empty()/get() is racy in general; safe here only
            # because this thread is the sole consumer of the queue.
            messages = []
            while not anomaly_buffer.empty():
                messages.append(anomaly_buffer.get())

            if messages:
                logger.info(
                    f"Uploading {len(messages)} anomaly messages to the cloud")

                # One HTTP POST per record (no batch endpoint used here).
                for msg in messages:
                    try:
                        response = requests.post(
                            CONFIG['cloud_api_url'],
                            json=msg,
                            headers={'Content-Type': 'application/json'},
                            timeout=10
                        )

                        if response.status_code == 200:
                            logger.info(
                                f"Message uploaded successfully: {msg['can_id']}")
                        else:
                            # Non-200: keep the record for a later retry.
                            # NOTE(review): this put() blocks if the producer
                            # refilled the buffer meanwhile — confirm intent;
                            # put_nowait() with a drop may be preferable.
                            logger.error(
                                f"Message upload failed: {response.status_code}, {response.text}")
                            anomaly_buffer.put(msg)
                    except Exception as e:
                        # Network-level failure (timeout, DNS, refused): retry later.
                        logger.error(f"Error during upload: {e}")
                        anomaly_buffer.put(msg)

            time.sleep(CONFIG['upload_interval'])
        except Exception as e:
            # Catch-all keeps the daemon alive on unexpected errors.
            logger.error(f"Error in cloud upload thread: {e}")
            time.sleep(CONFIG['upload_interval'])


def simulate_can_bus():
    """Yield synthetic CAN frames forever, one roughly every 10 ms.

    Each frame is a dict with 'can_id' (0..2047), a millisecond 'timestamp',
    and payload bytes DATA_0..DATA_7 (0..255). About 10% of frames carry an
    injected anomaly signature (DATA_3 = DATA_4 = 255).
    """
    import random

    while True:
        frame = {
            'can_id': random.randint(0, 2047),
            'timestamp': int(time.time() * 1000),
        }

        for idx in range(8):
            frame[f'DATA_{idx}'] = random.randint(0, 255)

        # Inject the anomaly pattern into ~10% of frames.
        if random.random() < 0.1:
            frame['DATA_3'] = 255
            frame['DATA_4'] = 255

        yield frame
        time.sleep(0.01)


def main():
    """Entry point: load the model, start the uploader, monitor the bus.

    Never returns under normal operation — the monitoring loop is infinite.
    """
    logger.info("Starting CAN bus intrusion detection system")

    model = load_model(CONFIG['model_path'])
    if model is None:
        logger.error("Unable to load model, exiting system")
        return

    # Uploader runs as a daemon so it dies with the main thread.
    threading.Thread(target=cloud_upload_thread, daemon=True).start()

    logger.info("Starting CAN bus monitoring")

    for can_msg in simulate_can_bus():
        # The allow/block result is currently unused — placeholder for a
        # future gateway filter.
        process_can_message(model, can_msg)


# Script entry point: main() never returns during normal operation.
if __name__ == "__main__":
    main()
