import { EventEmitter } from 'events';
import { HealthChecker, HealthStatus } from './health-checker';
import { Logger } from '@/types';
import { createLogger } from '@/utils/logger';

/**
 * Alert severity levels, ordered from least to most severe.
 */
export enum AlertSeverity {
  // Informational only; auto-resolved by MonitoringSystem.createAlert after one minute.
  INFO = 'info',
  // A condition worth attention (e.g. degraded health, queue backlog).
  WARNING = 'warning',
  // A failure that needs intervention (e.g. messages with no consumers).
  ERROR = 'error',
  // Severe failure (e.g. unhealthy system, broken connection).
  CRITICAL = 'critical',
}

/**
 * A single alert raised by the monitoring system.
 *
 * Alerts are held in memory until resolved via resolveAlert() or pruned
 * once older than the configured retention period.
 */
export interface Alert {
  /** Unique identifier: `alert_<timestamp>_<random suffix>`. */
  id: string;
  /** Severity of the condition; see AlertSeverity. */
  severity: AlertSeverity;
  /** Short human-readable summary (e.g. "Queue Backlog"). */
  title: string;
  /** Detailed description of the condition. */
  message: string;
  /** Creation time, milliseconds since the Unix epoch. */
  timestamp: number;
  /** Subsystem that raised the alert (e.g. 'memory', 'queue', 'connection'). */
  source: string;
  /** Optional structured context attached to the alert. */
  metadata?: Record<string, unknown>;
}

/**
 * Monitoring configuration.
 *
 * Every field has a default supplied by the MonitoringSystem constructor,
 * so callers usually pass a Partial<MonitoringConfig>.
 */
export interface MonitoringConfig {
  /** How often to run a health check, in milliseconds (default 30 000). */
  healthCheckInterval: number; // milliseconds
  /** Thresholds above which alerts are raised. */
  alertThresholds: {
    /** Memory usage alert threshold, percentage of limit (default 80). */
    memoryUsage: number; // percentage
    /** Connection failure count threshold (default 5). */
    connectionFailures: number; // count
    /** Per-queue message backlog threshold (default 10 000). */
    messageBacklog: number; // count per queue
    /** Processing latency threshold in milliseconds (default 5 000). */
    processingLatency: number; // milliseconds
  };
  /** How long alerts are retained before cleanup, in milliseconds (default 24 h). */
  retentionPeriod: number; // milliseconds
}

/**
 * Monitoring system for RabbitMQ operations.
 *
 * Periodically polls the injected HealthChecker, records simple in-memory
 * metrics, raises alerts when configured thresholds are crossed, and prunes
 * alerts older than the retention period.
 *
 * Emitted events: 'started', 'stopped', 'healthCheck' (HealthStatus),
 * 'alert' (Alert), 'alertResolved' (Alert).
 */
export class MonitoringSystem extends EventEmitter {
  private readonly healthChecker: HealthChecker;
  private readonly config: MonitoringConfig;
  private readonly logger: Logger;
  /** Active (unresolved) alerts, oldest first. */
  private readonly alerts: Alert[] = [];
  /** Metric name -> recent values, capped at 1000 data points per metric. */
  private readonly metrics: Map<string, number[]> = new Map();
  private monitoringTimer?: NodeJS.Timeout;
  /** Hourly cleanup timer; retained so stop() can cancel it (previously leaked). */
  private cleanupTimer?: NodeJS.Timeout;
  private isMonitoring = false;

  /**
   * @param healthChecker - source of health status snapshots
   * @param config - optional overrides; unspecified fields use the defaults below
   * @param logger - optional logger; a named default is created when omitted
   */
  constructor(
    healthChecker: HealthChecker,
    config: Partial<MonitoringConfig> = {},
    logger?: Logger
  ) {
    super();
    this.healthChecker = healthChecker;
    // alertThresholds is merged per-key (rather than replaced wholesale) so a
    // caller overriding one threshold keeps the defaults for the others.
    this.config = {
      healthCheckInterval: 30000, // 30 seconds
      retentionPeriod: 24 * 60 * 60 * 1000, // 24 hours
      ...config,
      alertThresholds: {
        memoryUsage: 80, // 80%
        connectionFailures: 5,
        messageBacklog: 10000,
        processingLatency: 5000, // 5 seconds
        ...config.alertThresholds,
      },
    };
    this.logger = logger ?? createLogger('MonitoringSystem');
  }

  /**
   * Start periodic health checks and the hourly data-cleanup timer.
   * Idempotent: calling start() while running only logs a warning.
   */
  start(): void {
    if (this.isMonitoring) {
      this.logger.warn('Monitoring is already running');
      return;
    }

    this.isMonitoring = true;
    this.logger.info('Starting monitoring system');

    // Periodic health checks; failures are logged rather than left as
    // unhandled rejections inside the timer callback.
    this.monitoringTimer = setInterval(() => {
      this.performHealthCheck().catch(error => {
        this.logger.error('Health check failed', error);
      });
    }, this.config.healthCheckInterval);

    // Hourly cleanup of expired alerts. The handle is stored so stop() can
    // clear it — the original code discarded it, leaking one extra interval
    // per start()/stop() cycle.
    this.cleanupTimer = setInterval(
      () => {
        this.cleanupOldData();
      },
      60 * 60 * 1000
    ); // Cleanup every hour

    this.emit('started');
  }

  /**
   * Stop monitoring and cancel both timers. Idempotent: a no-op when the
   * system is not running.
   */
  stop(): void {
    if (!this.isMonitoring) {
      return;
    }

    this.isMonitoring = false;

    if (this.monitoringTimer) {
      clearInterval(this.monitoringTimer);
      this.monitoringTimer = undefined;
    }

    if (this.cleanupTimer) {
      clearInterval(this.cleanupTimer);
      this.cleanupTimer = undefined;
    }

    this.logger.info('Monitoring system stopped');
    this.emit('stopped');
  }

  /**
   * Run one health check cycle: record metrics, evaluate alert conditions,
   * and re-emit the status. Any failure is converted into an ERROR alert
   * rather than propagated to the caller.
   */
  private async performHealthCheck(): Promise<void> {
    try {
      const healthStatus = await this.healthChecker.checkHealth();

      // Duration is approximated as "now minus the status timestamp" —
      // assumes the checker stamps the check's start time; TODO confirm.
      this.recordMetric('health_check_duration', Date.now() - healthStatus.timestamp);
      this.recordMetric('memory_usage', healthStatus.checks.memory.usage);

      this.checkAlerts(healthStatus);

      this.emit('healthCheck', healthStatus);
    } catch (error) {
      this.createAlert(
        AlertSeverity.ERROR,
        'Health Check Failed',
        `Health check failed: ${(error as Error).message}`,
        'monitoring'
      );
    }
  }

  /**
   * Evaluate a health snapshot against configured thresholds and raise
   * alerts for each violated condition.
   *
   * Synchronous: the previous `async` declaration awaited nothing.
   *
   * NOTE(review): alerts are not de-duplicated, so a persistent condition
   * produces a new alert every check cycle until resolved or cleaned up.
   */
  private checkAlerts(healthStatus: HealthStatus): void {
    // Overall health state.
    if (healthStatus.status === 'unhealthy') {
      this.createAlert(
        AlertSeverity.CRITICAL,
        'System Unhealthy',
        'RabbitMQ system is in unhealthy state',
        'health_check',
        { healthStatus }
      );
    } else if (healthStatus.status === 'degraded') {
      this.createAlert(
        AlertSeverity.WARNING,
        'System Degraded',
        'RabbitMQ system is in degraded state',
        'health_check',
        { healthStatus }
      );
    }

    // Memory pressure: WARNING above the configured threshold, escalating
    // to CRITICAL above 95%.
    const memoryUsagePercent =
      (healthStatus.checks.memory.usage / healthStatus.checks.memory.limit) * 100;
    if (memoryUsagePercent > this.config.alertThresholds.memoryUsage) {
      this.createAlert(
        memoryUsagePercent > 95 ? AlertSeverity.CRITICAL : AlertSeverity.WARNING,
        'High Memory Usage',
        `Memory usage is ${memoryUsagePercent.toFixed(1)}%`,
        'memory',
        { memoryUsage: healthStatus.checks.memory }
      );
    }

    // Broker connection.
    if (healthStatus.checks.connection.status === 'fail') {
      this.createAlert(
        AlertSeverity.CRITICAL,
        'Connection Failed',
        healthStatus.checks.connection.message,
        'connection'
      );
    }

    // Per-queue conditions: backlog over threshold, or stranded messages
    // (messages present but nothing consuming them).
    if (healthStatus.checks.queues) {
      for (const queue of healthStatus.checks.queues.details) {
        if (queue.messageCount > this.config.alertThresholds.messageBacklog) {
          this.createAlert(
            AlertSeverity.WARNING,
            'Queue Backlog',
            `Queue ${queue.name} has ${queue.messageCount} messages`,
            'queue',
            { queueName: queue.name, messageCount: queue.messageCount }
          );
        }

        if (queue.consumerCount === 0 && queue.messageCount > 0) {
          this.createAlert(
            AlertSeverity.ERROR,
            'No Consumers',
            `Queue ${queue.name} has messages but no consumers`,
            'queue',
            { queueName: queue.name, messageCount: queue.messageCount }
          );
        }
      }
    }
  }

  /**
   * Create, store, and emit an alert. INFO alerts auto-resolve after one
   * minute; all others persist until resolveAlert() or retention cleanup.
   */
  createAlert(
    severity: AlertSeverity,
    title: string,
    message: string,
    source: string,
    metadata?: Record<string, unknown>
  ): void {
    const alert: Alert = {
      // slice(2, 11) replaces the deprecated substr(2, 9); same 9-char suffix.
      id: `alert_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`,
      severity,
      title,
      message,
      timestamp: Date.now(),
      source,
      metadata,
    };

    this.alerts.push(alert);
    this.logger.warn(`Alert created: ${title}`, { alert });
    this.emit('alert', alert);

    // Auto-resolve info alerts after a short time.
    if (severity === AlertSeverity.INFO) {
      setTimeout(() => {
        this.resolveAlert(alert.id);
      }, 60000); // 1 minute
    }
  }

  /**
   * Resolve (remove) an alert by id, emitting 'alertResolved' on success.
   *
   * @returns true if the alert existed and was removed, false otherwise
   */
  resolveAlert(alertId: string): boolean {
    const index = this.alerts.findIndex(alert => alert.id === alertId);
    if (index !== -1) {
      const alert = this.alerts.splice(index, 1)[0];
      this.logger.info(`Alert resolved: ${alert.title}`, { alertId });
      this.emit('alertResolved', alert);
      return true;
    }
    return false;
  }

  /**
   * Get a snapshot of active alerts, optionally filtered by severity.
   * Returns a copy; mutating the result does not affect internal state.
   */
  getActiveAlerts(severity?: AlertSeverity): Alert[] {
    if (severity) {
      return this.alerts.filter(alert => alert.severity === severity);
    }
    return [...this.alerts];
  }

  /**
   * Record a metric value, keeping at most the 1000 most recent data points
   * per metric name.
   */
  recordMetric(name: string, value: number): void {
    if (!this.metrics.has(name)) {
      this.metrics.set(name, []);
    }

    const values = this.metrics.get(name)!;
    values.push(value);

    // Keep only recent values (last 1000 data points).
    if (values.length > 1000) {
      values.shift();
    }
  }

  /**
   * Compute summary statistics for a named metric.
   *
   * @returns count/min/max/avg/latest, or null when no data has been recorded
   */
  getMetricStats(name: string): {
    count: number;
    min: number;
    max: number;
    avg: number;
    latest: number;
  } | null {
    const values = this.metrics.get(name);
    if (!values || values.length === 0) {
      return null;
    }

    const min = Math.min(...values);
    const max = Math.max(...values);
    const avg = values.reduce((sum, val) => sum + val, 0) / values.length;
    const latest = values[values.length - 1];

    return {
      count: values.length,
      min,
      max,
      avg,
      latest,
    };
  }

  /**
   * Get summary statistics for every recorded metric, keyed by metric name.
   */
  getAllMetrics(): Record<string, ReturnType<MonitoringSystem['getMetricStats']>> {
    const result: Record<string, ReturnType<MonitoringSystem['getMetricStats']>> = {};

    for (const [name] of this.metrics) {
      result[name] = this.getMetricStats(name);
    }

    return result;
  }

  /**
   * Remove alerts older than the retention period. Iterates backwards so
   * splice() does not skip elements.
   */
  private cleanupOldData(): void {
    const cutoffTime = Date.now() - this.config.retentionPeriod;

    const initialAlertCount = this.alerts.length;
    for (let i = this.alerts.length - 1; i >= 0; i--) {
      if (this.alerts[i].timestamp < cutoffTime) {
        this.alerts.splice(i, 1);
      }
    }

    const removedAlerts = initialAlertCount - this.alerts.length;
    if (removedAlerts > 0) {
      this.logger.debug(`Cleaned up ${removedAlerts} old alerts`);
    }
  }

  /**
   * Get a summary of the monitoring system's current state: whether it is
   * running, active alert counts (total and per severity), number of
   * distinct metrics, and process uptime in seconds.
   */
  getStatus(): {
    isMonitoring: boolean;
    activeAlerts: number;
    alertsBySeverity: Record<AlertSeverity, number>;
    metricsCount: number;
    uptime: number;
  } {
    const alertsBySeverity = {
      [AlertSeverity.INFO]: 0,
      [AlertSeverity.WARNING]: 0,
      [AlertSeverity.ERROR]: 0,
      [AlertSeverity.CRITICAL]: 0,
    };

    for (const alert of this.alerts) {
      alertsBySeverity[alert.severity]++;
    }

    return {
      isMonitoring: this.isMonitoring,
      activeAlerts: this.alerts.length,
      alertsBySeverity,
      metricsCount: this.metrics.size,
      uptime: process.uptime(),
    };
  }
}

/**
 * Convenience factory for a MonitoringSystem instance.
 *
 * @param healthChecker - health checker the system will poll
 * @param config - optional overrides for the default monitoring configuration
 * @param logger - optional logger; the system creates a default one when omitted
 * @returns a new, not-yet-started MonitoringSystem
 */
export function createMonitoringSystem(
  healthChecker: HealthChecker,
  config?: Partial<MonitoringConfig>,
  logger?: Logger
): MonitoringSystem {
  return new MonitoringSystem(healthChecker, config, logger);
}
