import { ConnectionPool } from '@/core/connection-pool';
import { Producer } from '@/core/producer';
import { Consumer } from '@/core/consumer';
import { Message } from '@/core/message';
import { MessageHandler, PerformanceTestResult, Logger } from '@/types';
import { createLogger } from '@/utils/logger';
import { createFunctionHandler } from '@/core/handlers';

/**
 * Performance tester for RabbitMQ operations
 */
export class PerformanceTester {
  private readonly connectionPool: ConnectionPool;
  private readonly logger: Logger;

  constructor(connectionPool: ConnectionPool, logger?: Logger) {
    this.connectionPool = connectionPool;
    this.logger = logger ?? createLogger('PerformanceTester');
  }

  /**
   * Declare a non-durable queue used by a test run.
   *
   * The pooled connection is returned in a `finally` block so it is never
   * leaked when `assertQueue` rejects (the original code leaked it on error).
   */
  private async setupQueue(queueName: string): Promise<void> {
    const connection = await this.connectionPool.getConnection();
    try {
      await connection.channel.assertQueue(queueName, { durable: false });
    } finally {
      await this.connectionPool.returnConnection(connection);
    }
  }

  /**
   * Compute latency statistics without mutating the input array.
   *
   * Safe for empty input (returns all zeros instead of NaN/undefined), and the
   * percentile index is clamped so it can never run past the end of the array.
   */
  private static computeLatencyStats(latencies: readonly number[]): {
    min: number;
    max: number;
    avg: number;
    p95: number;
    p99: number;
  } {
    if (latencies.length === 0) {
      return { min: 0, max: 0, avg: 0, p95: 0, p99: 0 };
    }

    // Copy before sorting — Array.prototype.sort mutates in place.
    const sorted = [...latencies].sort((a, b) => a - b);
    const sum = sorted.reduce((acc, lat) => acc + lat, 0);
    const percentile = (p: number): number =>
      sorted[Math.min(sorted.length - 1, Math.floor(sorted.length * p))];

    return {
      min: sorted[0],
      max: sorted[sorted.length - 1],
      avg: sum / sorted.length,
      p95: percentile(0.95),
      p99: percentile(0.99),
    };
  }

  /**
   * Run a throughput test: fan messages out over concurrent producers and
   * measure sustained messages/second.
   *
   * @param queueName - Queue to publish to (declared non-durable).
   * @param messageCount - Total number of messages across all producers.
   * @param concurrentProducers - Number of parallel producer workers.
   * @param messageSize - Payload size in bytes for each message.
   * @returns Aggregate count, wall-clock duration (ms), and throughput (msg/s).
   */
  async runThroughputTest(
    queueName: string,
    messageCount: number,
    concurrentProducers = 10,
    messageSize = 1024
  ): Promise<PerformanceTestResult> {
    this.logger.info(
      `Starting throughput test: ${messageCount} messages, ${concurrentProducers} producers`
    );

    await this.setupQueue(queueName);

    const startTime = Date.now();
    const messageData = 'x'.repeat(messageSize); // Create message of specified size

    // Create producer workers; worker 0 absorbs the remainder so the total
    // adds up exactly to messageCount.
    const producerWorkers = Array.from({ length: concurrentProducers }, async (_, workerId) => {
      const producer = new Producer(this.connectionPool);
      await producer.initialize();

      try {
        const messagesPerWorker = Math.floor(messageCount / concurrentProducers);
        const extraMessages = workerId === 0 ? messageCount % concurrentProducers : 0;
        const totalMessages = messagesPerWorker + extraMessages;

        for (let i = 0; i < totalMessages; i++) {
          const message = new Message({
            workerId,
            messageNum: i,
            data: messageData,
            timestamp: Date.now(),
          });

          await producer.send(message, { routingKey: queueName });
        }

        return totalMessages;
      } finally {
        // Always release the producer, even if a send fails.
        await producer.close();
      }
    });

    // Wait for all producers to complete.
    const results = await Promise.all(producerWorkers);
    const totalSent = results.reduce((sum, count) => sum + count, 0);

    const duration = Date.now() - startTime;
    const throughput = totalSent / (duration / 1000);

    this.logger.info(`Throughput test completed: ${throughput.toFixed(2)} messages/second`);

    return {
      messageCount: totalSent,
      duration,
      throughput,
      concurrentProducers,
    };
  }

  /**
   * Run a latency test: publish samples one at a time and measure the
   * round-trip time of each individual `send`.
   *
   * @param queueName - Queue to publish to (declared non-durable).
   * @param sampleCount - Number of latency samples to collect.
   * @param messageSize - Payload size in bytes for each message.
   * @returns Result whose `latencyStats` holds min/max/avg/p95/p99 in ms;
   *   `duration` and `throughput` are 0 (not applicable for this test).
   */
  async runLatencyTest(
    queueName: string,
    sampleCount = 1000,
    messageSize = 1024
  ): Promise<PerformanceTestResult> {
    this.logger.info(`Starting latency test: ${sampleCount} samples`);

    await this.setupQueue(queueName);

    const producer = new Producer(this.connectionPool);
    await producer.initialize();

    const latencies: number[] = [];
    const messageData = 'x'.repeat(messageSize);

    try {
      for (let i = 0; i < sampleCount; i++) {
        const startTime = Date.now();

        const message = new Message({
          sampleId: i,
          data: messageData,
          timestamp: startTime,
        });

        await producer.send(message, { routingKey: queueName });

        latencies.push(Date.now() - startTime);

        // Small delay every 100 samples to avoid overwhelming the system.
        if (i % 100 === 0) {
          await new Promise(resolve => setTimeout(resolve, 10));
        }
      }
    } finally {
      await producer.close();
    }

    const stats = PerformanceTester.computeLatencyStats(latencies);

    this.logger.info(
      `Latency test completed: avg=${stats.avg.toFixed(2)}ms, p95=${stats.p95}ms, p99=${stats.p99}ms`
    );

    return {
      messageCount: sampleCount,
      duration: 0, // Not applicable for latency test
      throughput: 0, // Not applicable for latency test
      concurrentProducers: 1,
      latencyStats: stats,
    };
  }

  /**
   * Run an end-to-end test (producer -> consumer): publish messages and wait
   * until consumers have processed them all, measuring delivery latency.
   *
   * @param queueName - Queue to publish to (declared non-durable).
   * @param messageCount - Total number of messages to publish.
   * @param concurrentConsumers - Number of parallel consumers.
   * @param messageSize - Payload size in bytes for each message.
   * @param timeoutMs - Maximum time (ms) to wait for all messages to be
   *   processed; 0 (the default) waits indefinitely, preserving the previous
   *   behavior. A positive value guards against hanging forever when messages
   *   are lost.
   * @throws Error when `timeoutMs` > 0 and processing does not finish in time.
   */
  async runEndToEndTest(
    queueName: string,
    messageCount: number,
    concurrentConsumers = 5,
    messageSize = 1024,
    timeoutMs = 0
  ): Promise<PerformanceTestResult> {
    this.logger.info(
      `Starting end-to-end test: ${messageCount} messages, ${concurrentConsumers} consumers`
    );

    await this.setupQueue(queueName);

    const processedMessages: Array<{ latency: number; timestamp: number }> = [];
    const processedLock = { count: 0 };

    // Handler that records delivery latency relative to the publish timestamp.
    const handler = createFunctionHandler(async (messageBody: { timestamp: number }) => {
      const now = Date.now();
      const latency = now - messageBody.timestamp;

      processedMessages.push({ latency, timestamp: now });
      processedLock.count++;

      return { processed: true };
    });

    // Start consumers.
    const consumers: Consumer[] = [];
    const producer = new Producer(this.connectionPool);

    let duration: number;
    try {
      for (let i = 0; i < concurrentConsumers; i++) {
        const consumer = new Consumer(this.connectionPool, handler);
        await consumer.startConsuming({
          queueName,
          prefetchCount: 10,
          autoAck: false,
        });
        consumers.push(consumer);
      }

      // Give consumers a moment to be ready before publishing.
      await new Promise(resolve => setTimeout(resolve, 1000));

      await producer.initialize();

      const messageData = 'x'.repeat(messageSize);
      const startTime = Date.now();

      for (let i = 0; i < messageCount; i++) {
        const message = new Message({
          messageId: i,
          data: messageData,
          timestamp: Date.now(),
        });

        await producer.send(message, { routingKey: queueName });
      }

      // Poll until every message has been processed, bounded by timeoutMs
      // when one is provided (otherwise this waits forever, as before).
      const waitStart = Date.now();
      while (processedLock.count < messageCount) {
        if (timeoutMs > 0 && Date.now() - waitStart > timeoutMs) {
          throw new Error(
            `End-to-end test timed out after ${timeoutMs}ms: ` +
              `${processedLock.count}/${messageCount} messages processed`
          );
        }
        await new Promise(resolve => setTimeout(resolve, 100));
      }

      duration = Date.now() - startTime;
    } finally {
      // Always release resources, even when the wait times out or a send fails.
      await producer.close();
      await Promise.all(consumers.map(consumer => consumer.close()));
    }

    const throughput = messageCount / (duration / 1000);
    const stats = PerformanceTester.computeLatencyStats(
      processedMessages.map(m => m.latency)
    );

    this.logger.info(
      `End-to-end test completed: ${throughput.toFixed(2)} messages/second, avg latency=${stats.avg.toFixed(2)}ms`
    );

    return {
      messageCount,
      duration,
      throughput,
      concurrentProducers: 1,
      latencyStats: stats,
    };
  }

  /**
   * Run the comprehensive performance test suite: throughput, latency, and
   * end-to-end, each against its own derived queue, with a 2s cool-down
   * between tests.
   */
  async runTestSuite(queueName: string): Promise<{
    throughput: PerformanceTestResult;
    latency: PerformanceTestResult;
    endToEnd: PerformanceTestResult;
  }> {
    this.logger.info('Starting comprehensive performance test suite');

    const throughputResult = await this.runThroughputTest(queueName + '_throughput', 10000, 10);
    await new Promise(resolve => setTimeout(resolve, 2000)); // Cool down

    const latencyResult = await this.runLatencyTest(queueName + '_latency', 1000);
    await new Promise(resolve => setTimeout(resolve, 2000)); // Cool down

    const endToEndResult = await this.runEndToEndTest(queueName + '_e2e', 5000, 5);

    this.logger.info('Performance test suite completed');

    return {
      throughput: throughputResult,
      latency: latencyResult,
      endToEnd: endToEndResult,
    };
  }
}

/**
 * Utility function to create performance tester
 */
/**
 * Convenience factory for {@link PerformanceTester}.
 *
 * @param connectionPool - Pool the tester draws connections from.
 * @param logger - Optional logger; the tester creates its own when omitted.
 */
export const createPerformanceTester = (
  connectionPool: ConnectionPool,
  logger?: Logger
): PerformanceTester => new PerformanceTester(connectionPool, logger);
