import { Server as HttpServer, createServer } from 'http';
import { Server as NetServer, createServer as createNetServer, Socket } from 'net';
import express from 'express';
import { createProxyMiddleware, type Options } from 'http-proxy-middleware';
import { WebSocket } from 'ws';
import { ProxyRule, BackendServer } from '@shared/schema';
import { storage } from './storage';

// Runtime state for one started proxy rule, keyed by rule id in ProxyManager.
interface ProxyInstance {
  // The rule this instance was started from.
  rule: ProxyRule;
  // http-proxy-middleware handler (present for HTTP/HTTPS rules only).
  middleware?: any;
  // Listening HTTP server (present for HTTP/HTTPS rules only).
  httpServer?: HttpServer;
  // Listening TCP server (present for TCP/UDP rules only).
  tcpServer?: NetServer;
  // Backend pool used for load balancing; refreshed by the health checker.
  servers: BackendServer[];
  // Cursor for round-robin selection across `servers`.
  currentServerIndex: number;
}

/**
 * Manages the lifecycle of proxy listeners defined by ProxyRule records:
 * starting/stopping HTTP(S) and TCP servers, load balancing across backend
 * servers, periodic health checks, metrics sampling, and broadcasting live
 * events to connected WebSocket clients.
 *
 * Intended as a process-lifetime singleton (see the export at the bottom of
 * this file); the background intervals started in the constructor are never
 * cleared.
 */
export class ProxyManager {
  /** Running proxy instances keyed by proxy rule id. */
  private activeProxies: Map<number, ProxyInstance> = new Map();
  /** WebSocket clients that receive event/metric broadcasts. */
  private wsConnections: Set<WebSocket> = new Set();

  constructor() {
    // Background loops run for the lifetime of the process.
    this.startHealthChecks();
    this.startMetricsCollection();
  }

  /** Registers a WebSocket client for broadcasts; auto-removed on close. */
  addWebSocketConnection(ws: WebSocket) {
    this.wsConnections.add(ws);
    ws.on('close', () => {
      this.wsConnections.delete(ws);
    });
  }

  /** Sends one JSON-serialized payload to every currently-open client. */
  private broadcast(data: unknown) {
    const message = JSON.stringify(data);
    this.wsConnections.forEach(ws => {
      if (ws.readyState === WebSocket.OPEN) {
        ws.send(message);
      }
    });
  }

  /**
   * Starts (or restarts, if already running) the proxy for a rule.
   *
   * @param ruleId - id of the ProxyRule to start.
   * @returns true on success; on any failure the rule's status is set to
   *   'error' and false is returned (this method never throws).
   */
  async startProxy(ruleId: number): Promise<boolean> {
    try {
      const rule = await storage.getProxyRule(ruleId);
      if (!rule) {
        throw new Error(`Proxy rule ${ruleId} not found`);
      }

      const servers = await storage.getBackendServers(ruleId);
      if (servers.length === 0) {
        throw new Error(`No backend servers configured for rule ${ruleId}`);
      }

      // Restart cleanly if this rule already has a live listener.
      if (this.activeProxies.has(ruleId)) {
        await this.stopProxy(ruleId);
      }

      let instance: ProxyInstance;
      if (rule.type === 'HTTP' || rule.type === 'HTTPS') {
        instance = await this.createHttpProxy(rule, servers);
      } else if (rule.type === 'TCP' || rule.type === 'UDP') {
        // NOTE(review): UDP rules currently get a TCP listener too —
        // datagram (dgram) proxying is not implemented here.
        instance = await this.createTcpProxy(rule, servers);
      } else {
        throw new Error(`Unsupported proxy type: ${rule.type}`);
      }

      this.activeProxies.set(ruleId, instance);
      await storage.updateProxyRule(ruleId, { status: 'running' });

      this.broadcast({
        type: 'PROXY_STARTED',
        data: { ruleId, ruleName: rule.name }
      });

      // Fire-and-forget: logging must not delay or fail the start path.
      void this.logRequest(ruleId, 'INFO', `Proxy started on ${rule.type} port ${rule.remotePort} -> ${servers.map(s => `${s.host}:${s.port}`).join(', ')}`);

      return true;
    } catch (error) {
      await storage.updateProxyRule(ruleId, { status: 'error' });
      void this.logRequest(ruleId, 'ERROR', `Failed to start proxy: ${error}`);
      return false;
    }
  }

  /**
   * Creates and starts an HTTP/HTTPS proxy listening on the rule's remote
   * port, routing each request to a backend chosen by selectServer().
   * Rejects when no healthy backend exists or the port cannot be bound.
   */
  private async createHttpProxy(rule: ProxyRule, servers: BackendServer[]): Promise<ProxyInstance> {
    const healthyServers = servers.filter(s => s.status === 'healthy');
    // Bug fix: the previous code dereferenced healthyServers[0] without
    // checking, crashing when every backend was unhealthy. Mirror the
    // guard the TCP path already had.
    if (healthyServers.length === 0) {
      throw new Error('No healthy servers available');
    }

    const app = express();

    const proxyOptions: Options = {
      // Static default target; the router below overrides it per request.
      target: `http://${healthyServers[0].host}:${healthyServers[0].port}`,
      changeOrigin: true,
      ws: true, // Proxy WebSocket upgrade requests as well.
      router: (req) => {
        const instance = this.activeProxies.get(rule.id);
        if (!instance || instance.servers.length === 0) {
          // Instance not registered yet (or pool emptied): fall back to
          // the rule's configured target.
          return `http://${rule.targetHost}:${rule.localPort}`;
        }
        const server = this.selectServer(instance);
        return `http://${server.host}:${server.port}`;
      }
    };

    // Request logging middleware (fire-and-forget persistence).
    app.use((req, res, next) => {
      void this.logRequest(rule.id, 'INFO', `${req.method} ${req.url} from ${req.ip}`);
      next();
    });

    const middleware = createProxyMiddleware(proxyOptions);
    app.use('/', middleware);

    // Express error handler — must keep the 4-argument signature to be
    // recognized as such by express.
    app.use((err: any, req: any, res: any, next: any) => {
      void this.logRequest(rule.id, 'ERROR', `Proxy error: ${err.message} for ${req.method} ${req.url}`);
      if (!res.headersSent) {
        res.status(502).json({ error: 'Bad Gateway' });
      }
    });

    const httpServer = createServer(app);

    return new Promise((resolve, reject) => {
      // `once` so that runtime errors after a successful bind are not
      // funneled into the (already settled) promise.
      httpServer.once('error', (err) => {
        reject(new Error(`Failed to start HTTP proxy on port ${rule.remotePort}: ${err.message}`));
      });
      httpServer.listen(rule.remotePort, '0.0.0.0', () => {
        resolve({
          rule,
          middleware,
          httpServer,
          servers: healthyServers,
          currentServerIndex: 0
        });
      });
    });
  }

  /**
   * Creates and starts a raw TCP proxy on the rule's remote port, piping
   * each client connection to a backend chosen by selectServer().
   * Rejects when no healthy backend exists or the port cannot be bound.
   */
  private async createTcpProxy(rule: ProxyRule, servers: BackendServer[]): Promise<ProxyInstance> {
    const healthyServers = servers.filter(s => s.status === 'healthy');
    if (healthyServers.length === 0) {
      throw new Error('No healthy servers available');
    }

    const tcpServer = createNetServer();
    // Fallback round-robin cursor used only before the instance is
    // registered in activeProxies (i.e. during startup races).
    let fallbackIndex = 0;

    tcpServer.on('connection', (clientSocket) => {
      // Bug fix: the previous code always used the server list captured at
      // start time and its own counter, ignoring both the configured
      // load-balancing policy and the health checker's refreshed pool.
      const instance = this.activeProxies.get(rule.id);
      let server: BackendServer;
      if (instance && instance.servers.length > 0) {
        server = this.selectServer(instance);
      } else {
        server = healthyServers[fallbackIndex];
        fallbackIndex = (fallbackIndex + 1) % healthyServers.length;
      }

      void this.logRequest(rule.id, 'INFO', `New TCP connection from ${clientSocket.remoteAddress}:${clientSocket.remotePort} -> ${server.host}:${server.port}`);

      // Bug fix: use the imported net.Socket instead of require('net')
      // (CommonJS require in an ES module file).
      const backendSocket = new Socket();

      backendSocket.connect(server.port, server.host, () => {
        void this.logRequest(rule.id, 'INFO', `Connected to backend ${server.host}:${server.port}`);
      });

      // Bidirectional byte piping between client and backend.
      clientSocket.pipe(backendSocket);
      backendSocket.pipe(clientSocket);

      // On error on either side, tear down both sockets.
      const handleError = (err: Error, source: string) => {
        void this.logRequest(rule.id, 'ERROR', `TCP proxy error (${source}): ${err.message}`);
        clientSocket.destroy();
        backendSocket.destroy();
      };

      clientSocket.on('error', (err) => handleError(err, 'client'));
      backendSocket.on('error', (err) => handleError(err, 'backend'));

      // When either side closes, close the other to avoid leaked sockets.
      clientSocket.on('close', () => {
        void this.logRequest(rule.id, 'INFO', `Client connection closed`);
        backendSocket.destroy();
      });

      backendSocket.on('close', () => {
        void this.logRequest(rule.id, 'INFO', `Backend connection closed`);
        clientSocket.destroy();
      });
    });

    return new Promise((resolve, reject) => {
      tcpServer.once('error', (err) => {
        reject(new Error(`Failed to start TCP proxy on port ${rule.remotePort}: ${err.message}`));
      });
      tcpServer.listen(rule.remotePort, '0.0.0.0', () => {
        resolve({
          rule,
          tcpServer,
          servers: healthyServers,
          currentServerIndex: 0
        });
      });
    });
  }

  /**
   * Stops a running proxy and marks its rule 'stopped'.
   *
   * @returns true if an instance was found and stopped, false otherwise.
   *   Note: server.close() only stops accepting new connections; in-flight
   *   connections drain on their own.
   */
  async stopProxy(ruleId: number): Promise<boolean> {
    try {
      const instance = this.activeProxies.get(ruleId);
      if (!instance) {
        return false;
      }

      if (instance.httpServer) {
        instance.httpServer.close();
        void this.logRequest(ruleId, 'INFO', `HTTP proxy server stopped on port ${instance.rule.remotePort}`);
      }

      if (instance.tcpServer) {
        instance.tcpServer.close();
        void this.logRequest(ruleId, 'INFO', `TCP proxy server stopped on port ${instance.rule.remotePort}`);
      }

      this.activeProxies.delete(ruleId);
      await storage.updateProxyRule(ruleId, { status: 'stopped' });

      this.broadcast({
        type: 'PROXY_STOPPED',
        data: { ruleId, ruleName: instance.rule.name }
      });

      return true;
    } catch (error) {
      void this.logRequest(ruleId, 'ERROR', `Failed to stop proxy: ${error}`);
      return false;
    }
  }

  /**
   * Picks a backend according to the rule's load-balancing policy.
   * 'least-connections' is approximated by lowest recorded response time;
   * 'ip-hash' is not implemented and falls back to the first server.
   * Callers must guarantee instance.servers is non-empty.
   */
  private selectServer(instance: ProxyInstance): BackendServer {
    const { servers, rule } = instance;

    switch (rule.loadBalancing) {
      case 'round-robin': {
        // Bug fix: the health checker can shrink instance.servers, so the
        // stored cursor may be out of range — clamp with modulo.
        const server = servers[instance.currentServerIndex % servers.length];
        instance.currentServerIndex = (instance.currentServerIndex + 1) % servers.length;
        return server;
      }

      case 'least-connections':
        // Simplified: proxy for connection count is the last health-check
        // response time. TODO: track real active connection counts.
        return servers.reduce((prev, current) => {
          const prevTime = prev.responseTime || 0;
          const currentTime = current.responseTime || 0;
          return prevTime < currentTime ? prev : current;
        });

      case 'ip-hash':
        // TODO: hash the client IP; until then use the first server.
        return servers[0];

      default:
        return servers[0];
    }
  }

  /** Returns the http-proxy-middleware handler for a running rule, if any. */
  getProxyMiddleware(ruleId: number) {
    const instance = this.activeProxies.get(ruleId);
    return instance?.middleware;
  }

  /**
   * Persists a log entry and broadcasts it to WebSocket clients. Storage
   * failures are swallowed so logging can never break the proxy path
   * (call sites fire-and-forget this method with `void`).
   */
  private async logRequest(ruleId: number, level: 'INFO' | 'ERROR', message: string) {
    try {
      await storage.createProxyLog({
        level,
        message,
        proxyRuleId: ruleId,
        metadata: null
      });
    } catch {
      // Best-effort persistence; still broadcast below.
    }

    this.broadcast({
      type: 'NEW_LOG',
      data: { level, message, proxyRuleId: ruleId, timestamp: new Date() }
    });
  }

  /**
   * Every 30 seconds probes each backend's /health endpoint (HEAD, 5s
   * timeout), records status and latency, and refreshes the server pools
   * of running proxies that use the probed backend.
   */
  private startHealthChecks() {
    setInterval(async () => {
      try {
        const servers = await storage.getBackendServers();

        for (const server of servers) {
          const startTime = Date.now();

          // AbortController gives the probe a hard 5-second deadline.
          const controller = new AbortController();
          const timeoutId = setTimeout(() => controller.abort(), 5000);

          try {
            const response = await fetch(`http://${server.host}:${server.port}/health`, {
              method: 'HEAD',
              signal: controller.signal
            });

            await storage.updateBackendServer(server.id, {
              status: response.ok ? 'healthy' : 'unhealthy',
              responseTime: Date.now() - startTime,
            });

            // Refresh the pools of proxies that reference this backend so
            // load balancing sees the new status.
            for (const [ruleId, instance] of Array.from(this.activeProxies.entries())) {
              if (instance.servers.some((s: BackendServer) => s.id === server.id)) {
                instance.servers = await storage.getBackendServers(ruleId);
              }
            }
          } catch (error) {
            // Timeout, connection refused, etc.: mark unhealthy.
            await storage.updateBackendServer(server.id, {
              status: 'unhealthy',
            });
          } finally {
            // Bug fix: previously the timer leaked when fetch threw.
            clearTimeout(timeoutId);
          }
        }
      } catch {
        // Storage unavailable this tick; retry on the next interval
        // instead of surfacing an unhandled rejection.
      }
    }, 30000); // Check every 30 seconds
  }

  /**
   * Every 5 seconds stores and broadcasts a system metrics sample.
   * All values except activeConnections are SIMULATED placeholders
   * (random numbers), not real OS measurements.
   */
  private startMetricsCollection() {
    setInterval(async () => {
      const metrics = {
        cpuUsage: Math.floor(Math.random() * 100), // simulated — replace with real CPU monitoring
        memoryUsage: Math.floor(Math.random() * 100), // simulated
        networkIO: `${Math.floor(Math.random() * 1000)} MB/s`, // simulated
        activeConnections: this.activeProxies.size,
        totalTraffic: `${(Math.random() * 10).toFixed(1)}GB`, // simulated
        errorRate: `${(Math.random() * 0.5).toFixed(2)}%`, // simulated
        avgResponseTime: Math.floor(Math.random() * 200) + 20 // simulated
      };

      try {
        await storage.createSystemMetrics(metrics);
      } catch {
        // Best-effort persistence; still broadcast the sample.
      }

      this.broadcast({
        type: 'METRICS_UPDATE',
        data: metrics
      });
    }, 5000); // Update every 5 seconds
  }

  /** Number of proxies currently holding a live listener. */
  getActiveProxiesCount(): number {
    return this.activeProxies.size;
  }

  /** Sequentially stops and restarts every rule marked 'running'. */
  async restartAllProxies(): Promise<void> {
    const rules = await storage.getProxyRules();
    const runningRules = rules.filter(r => r.status === 'running');

    for (const rule of runningRules) {
      await this.stopProxy(rule.id);
      await this.startProxy(rule.id);
    }
  }
}

// Shared singleton; health-check and metrics intervals start on import.
export const proxyManager = new ProxyManager();
