package com.wondertek.onvif.service;

import cn.hutool.core.util.StrUtil;
import com.wondertek.onvif.entity.OnvifChannel;
import com.wondertek.onvif.repository.OnvifChannelRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.javacv.*;
import org.bytedeco.javacv.Frame;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

/**
 * 流媒体服务
 * 实现视频流的转发、转码和播放
 * 
 * @author wondertek
 * @version 1.0.0
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class StreamingService {

    // NOTE: the explicit Logger field was removed — @Slf4j on the class already
    // generates an identical `private static final Logger log`, and declaring it
    // manually makes Lombok skip generation with a "field already exists" warning.

    private final OnvifChannelRepository channelRepository;

    // Shared pool for frame-grabbing tasks; cached threads are reclaimed when idle.
    private final ExecutorService executorService = Executors.newCachedThreadPool();

    // Active stream sessions, keyed by session id ("stream_<channelId>_<millis>").
    private final Map<String, StreamSession> activeSessions = new ConcurrentHashMap<>();

    // MJPEG SSE emitters, keyed by emitter id ("mjpeg_<channelId>").
    private final Map<String, SseEmitter> mjpegEmitters = new ConcurrentHashMap<>();

    /**
     * Start playing a channel's video stream on a background worker.
     *
     * @param channelId channel ID
     * @param quality   requested quality (high/medium/low), logged only here
     * @return the newly created stream session ID
     * @throws RuntimeException if the channel does not exist or has no RTSP URL
     */
    public String startStream(Long channelId, String quality) {
        OnvifChannel channel = channelRepository.findById(channelId)
            .orElseThrow(() -> new RuntimeException("通道不存在: " + channelId));

        if (StrUtil.isEmpty(channel.getRtspUrl())) {
            throw new RuntimeException("通道未配置RTSP地址: " + channelId);
        }

        final String sessionId = "stream_" + channelId + "_" + System.currentTimeMillis();
        log.info("开始播放视频流: {} - {} (质量: {})", sessionId, channel.getRtspUrl(), quality);

        final StreamSession newSession = new StreamSession(sessionId, channel);
        activeSessions.put(sessionId, newSession);

        // Kick off the grab loop on the shared pool and remember the handle
        // so stopStream() can cancel/interrupt it later.
        newSession.setFuture(executorService.submit(() -> processStream(newSession)));

        return sessionId;
    }

    /**
     * Stop a playing stream session, if it is still registered.
     *
     * @param channelId channel ID (used for logging only)
     * @param sessionId session ID previously returned by {@link #startStream}
     */
    public void stopStream(Long channelId, String sessionId) {
        StreamSession removed = activeSessions.remove(sessionId);
        if (removed == null) {
            log.warn("未找到会话: {} (通道: {})", sessionId, channelId);
            return;
        }
        log.info("停止播放视频流: {} (通道: {})", sessionId, channelId);
        removed.stop();
    }

    /**
     * Open (or reopen) an MJPEG-over-SSE stream for a channel.
     * Only one emitter is kept per channel; a previous one is completed first.
     *
     * @param channelId channel ID
     * @return SSE emitter the caller should return to the client
     * @throws RuntimeException if the channel does not exist or has no RTSP URL
     */
    public SseEmitter getMjpegStream(Long channelId) {
        OnvifChannel channel = channelRepository.findById(channelId)
            .orElseThrow(() -> new RuntimeException("通道不存在: " + channelId));

        if (StrUtil.isEmpty(channel.getRtspUrl())) {
            throw new RuntimeException("通道未配置RTSP地址: " + channelId);
        }

        final String emitterId = "mjpeg_" + channelId;

        // A channel has at most one MJPEG emitter: close any previous one.
        SseEmitter previous = mjpegEmitters.remove(emitterId);
        if (previous != null) {
            previous.complete();
        }

        SseEmitter emitter = new SseEmitter(Long.MAX_VALUE); // no server-side timeout
        mjpegEmitters.put(emitterId, emitter);

        // Deregister on every terminal event; processMjpegStream() watches the map
        // and stops pumping once the entry disappears.
        emitter.onCompletion(() -> {
            mjpegEmitters.remove(emitterId);
            log.info("MJPEG流完成: {}", emitterId);
        });
        emitter.onTimeout(() -> {
            mjpegEmitters.remove(emitterId);
            log.info("MJPEG流超时: {}", emitterId);
        });
        emitter.onError(ex -> {
            mjpegEmitters.remove(emitterId);
            log.error("MJPEG流错误: {} - {}", emitterId, ex.getMessage());
        });

        executorService.submit(() -> processMjpegStream(channel, emitter, emitterId));
        return emitter;
    }

    /**
     * Attach a caller-supplied SSE emitter to a channel's video stream.
     *
     * @param channelId channel ID
     * @param quality   requested quality, forwarded to the pump loop
     * @param emitter   SSE emitter provided by the controller
     * @throws RuntimeException if the channel does not exist or has no RTSP URL
     */
    public void streamSse(Long channelId, String quality, SseEmitter emitter) {
        OnvifChannel channel = channelRepository.findById(channelId)
            .orElseThrow(() -> new RuntimeException("通道不存在: " + channelId));

        if (StrUtil.isEmpty(channel.getRtspUrl())) {
            throw new RuntimeException("通道未配置RTSP地址: " + channelId);
        }

        final String emitterId = "sse_" + channelId + "_" + System.currentTimeMillis();

        // Lifecycle callbacks are log-only; this emitter is not tracked in a map.
        emitter.onCompletion(() -> log.info("SSE流完成: {}", emitterId));
        emitter.onTimeout(() -> log.info("SSE流超时: {}", emitterId));
        emitter.onError(ex -> log.error("SSE流错误: {} - {}", emitterId, ex.getMessage()));

        // Pump frames on the shared worker pool.
        executorService.submit(() -> processSseStream(channel, emitter, emitterId, quality));
    }

    /**
     * Pump loop for an SSE stream: decodes frames from the channel's RTSP URL
     * and sends them as base64 JPEG data URIs until the worker is interrupted.
     *
     * @param channel   channel whose RTSP URL is read
     * @param emitter   SSE emitter to push frames to
     * @param emitterId emitter ID (used for logging only)
     * @param quality   requested quality (currently unused by this pipeline)
     */
    private void processSseStream(OnvifChannel channel, SseEmitter emitter, String emitterId, String quality) {
        FFmpegFrameGrabber grabber = null;
        Java2DFrameConverter converter = new Java2DFrameConverter();

        try {
            log.info("开始处理SSE流: {} - {}", emitterId, channel.getRtspUrl());

            grabber = new FFmpegFrameGrabber(channel.getRtspUrl());
            grabber.setOption("rtsp_transport", "tcp");
            grabber.setOption("stimeout", "10000000"); // 10s timeout, in microseconds

            grabber.start();

            while (!Thread.currentThread().isInterrupted()) {
                Frame frame = grabber.grabImage();
                if (frame == null) {
                    Thread.sleep(33); // ~30fps pacing while the source yields nothing
                    continue;
                }

                // Encode the decoded frame as a JPEG data URI (text-safe for SSE).
                BufferedImage bufferedImage = converter.convert(frame);
                if (bufferedImage != null) {
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    ImageIO.write(bufferedImage, "jpg", baos);
                    byte[] imageBytes = baos.toByteArray();

                    String base64Image = java.util.Base64.getEncoder().encodeToString(imageBytes);
                    String imageData = "data:image/jpeg;base64," + base64Image;

                    emitter.send(SseEmitter.event()
                        .name("frame")
                        .data(imageData));
                }

                Thread.sleep(33); // ~30fps
            }

        } catch (InterruptedException e) {
            // Fix: the original broad catch cleared the interrupt flag; restore
            // it so the pool thread's interruption is observable upstream.
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            log.error("SSE流处理失败: {}", e.getMessage(), e);
            try {
                emitter.completeWithError(e);
            } catch (Exception ex) {
                log.error("完成SSE发射器时出错", ex);
            }
        } finally {
            if (grabber != null) {
                try {
                    grabber.stop();
                    grabber.release();
                } catch (Exception e) {
                    log.error("释放grabber失败", e);
                }
            }
            // Fix: close the SSE connection on normal/interrupted exit too; the
            // original left it dangling until the client timed out. complete()
            // after completeWithError() throws IllegalStateException — ignored.
            try {
                emitter.complete();
            } catch (Exception ignored) {
                // emitter already completed or errored
            }
        }
    }

    /**
     * Grab a single frame from the channel's RTSP stream and return it as JPEG bytes.
     *
     * @param channelId channel ID
     * @return snapshot image as JPEG-encoded bytes
     * @throws RuntimeException if the channel is missing, has no RTSP URL,
     *                          or no decodable frame could be read
     */
    public byte[] getSnapshot(Long channelId) {
        OnvifChannel channel = channelRepository.findById(channelId)
            .orElseThrow(() -> new RuntimeException("通道不存在: " + channelId));

        if (StrUtil.isEmpty(channel.getRtspUrl())) {
            throw new RuntimeException("通道未配置RTSP地址: " + channelId);
        }

        log.debug("获取流快照: {}", channel.getRtspUrl());

        // Fix: try-with-resources guarantees the grabber is released even when
        // start()/grabImage() throws — the original leaked it on failure paths.
        // (FFmpegFrameGrabber.close() performs stop() + release().)
        try (FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(channel.getRtspUrl())) {
            grabber.setOption("rtsp_transport", "tcp");
            grabber.setOption("stimeout", "5000000"); // 5s timeout, in microseconds

            grabber.start();

            // Some streams need several grabs before the first decodable image frame.
            Frame frame = null;
            int attempts = 0;
            while (frame == null && attempts < 10) {
                frame = grabber.grabImage();
                attempts++;
            }

            if (frame == null) {
                throw new RuntimeException("无法获取视频帧");
            }

            Java2DFrameConverter converter = new Java2DFrameConverter();
            BufferedImage bufferedImage = converter.convert(frame);

            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ImageIO.write(bufferedImage, "jpg", baos);
            return baos.toByteArray();

        } catch (Exception e) {
            log.error("获取流快照失败: {} - {}", channel.getRtspUrl(), e.getMessage());
            // Fix: preserve the original exception as the cause instead of
            // keeping only its message.
            throw new RuntimeException("获取快照失败: " + e.getMessage(), e);
        }
    }

    /**
     * Frame-grab loop for one stream session: connects to the RTSP source and
     * keeps pulling frames until the session is stopped or the worker thread
     * is interrupted. Runs on the shared executor.
     *
     * @param session stream session to drive; deregistered from
     *                {@code activeSessions} when the loop ends
     */
    private void processStream(StreamSession session) {
        FFmpegFrameGrabber grabber = null;
        try {
            OnvifChannel channel = session.getChannel();
            log.info("开始处理视频流: {} - {}", session.getSessionId(), channel.getRtspUrl());

            grabber = new FFmpegFrameGrabber(channel.getRtspUrl());
            grabber.setOption("rtsp_transport", "tcp");
            grabber.setOption("stimeout", "10000000"); // 10s timeout, in microseconds

            grabber.start();
            session.setActive(true);

            while (session.isActive() && !Thread.currentThread().isInterrupted()) {
                Frame frame = grabber.grab();
                if (frame == null) {
                    Thread.sleep(33); // ~30fps pacing while the source yields nothing
                    continue;
                }

                // Frame processing hooks (transcoding, recording, ...) would go here.
                session.setLastFrameTime(System.currentTimeMillis());
            }

        } catch (InterruptedException e) {
            // Fix: stop() cancels the future with interruption; restore the flag
            // instead of letting the broad catch swallow it.
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            // Fix: log the stack trace, not just the message.
            log.error("处理视频流时发生错误: {} - {}", session.getSessionId(), e.getMessage(), e);
        } finally {
            if (grabber != null) {
                try {
                    grabber.stop();
                    grabber.release();
                } catch (Exception e) {
                    log.warn("释放grabber时发生错误: {}", e.getMessage());
                }
            }
            session.setActive(false);
            activeSessions.remove(session.getSessionId());
            log.info("视频流处理结束: {}", session.getSessionId());
        }
    }

    /**
     * MJPEG pump loop: decodes frames from the channel's RTSP stream and sends
     * them to the SSE emitter as base64 JPEG data URIs until the emitter is
     * deregistered from {@code mjpegEmitters}.
     *
     * @param channel   channel whose RTSP URL is read
     * @param emitter   SSE emitter to push frames to
     * @param emitterId registry key; the loop stops once it disappears from the map
     */
    private void processMjpegStream(OnvifChannel channel, SseEmitter emitter, String emitterId) {
        FFmpegFrameGrabber grabber = null;
        Java2DFrameConverter converter = new Java2DFrameConverter();

        try {
            log.info("开始处理MJPEG流: {} - {}", emitterId, channel.getRtspUrl());

            grabber = new FFmpegFrameGrabber(channel.getRtspUrl());
            grabber.setOption("rtsp_transport", "tcp");
            grabber.setOption("stimeout", "10000000"); // 10s timeout, in microseconds

            grabber.start();

            // Stop as soon as the emitter is removed (completed / timed out / replaced).
            while (mjpegEmitters.containsKey(emitterId)) {
                Frame frame = grabber.grabImage();
                if (frame == null) {
                    Thread.sleep(33); // ~30fps pacing while the source yields nothing
                    continue;
                }

                BufferedImage bufferedImage = converter.convert(frame);
                if (bufferedImage != null) {
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    ImageIO.write(bufferedImage, "jpg", baos);
                    byte[] imageBytes = baos.toByteArray();

                    // Base64 data URI keeps the binary JPEG payload text-safe for SSE.
                    String base64Image = java.util.Base64.getEncoder().encodeToString(imageBytes);
                    String mjpegData = "data:image/jpeg;base64," + base64Image;

                    emitter.send(SseEmitter.event()
                        .name("frame")
                        .data(mjpegData, MediaType.TEXT_PLAIN));
                }

                Thread.sleep(33); // ~30fps
            }

        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag instead of clearing it in the
            // broad catch; cleanup still happens in finally below.
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            log.error("处理MJPEG流时发生错误: {} - {}", emitterId, e.getMessage());
            try {
                emitter.completeWithError(e);
            } catch (Exception ignored) {
                // emitter already closed by the container
            }
        } finally {
            if (grabber != null) {
                try {
                    grabber.stop();
                    grabber.release();
                } catch (Exception e) {
                    log.warn("释放MJPEG grabber时发生错误: {}", e.getMessage());
                }
            }
            mjpegEmitters.remove(emitterId);
            try {
                emitter.complete();
            } catch (Exception ignored) {
                // already completed or errored
            }
            log.info("MJPEG流处理结束: {}", emitterId);
        }
    }

    /**
     * Number of currently active stream sessions.
     *
     * @return active session count
     */
    public int getActiveSessionCount() {
        return this.activeSessions.size();
    }

    /**
     * Number of currently registered MJPEG emitters.
     *
     * @return MJPEG stream count
     */
    public int getMjpegStreamCount() {
        return this.mjpegEmitters.size();
    }

    /**
     * Write a multipart MJPEG stream ("--frame" boundaries) directly to an
     * OutputStream until the calling thread is interrupted or I/O fails.
     *
     * @param channelId    channel ID
     * @param quality      requested quality (currently unused by this pipeline)
     * @param outputStream destination stream (typically the HTTP response body)
     * @throws RuntimeException if the channel is missing, has no RTSP URL,
     *                          or streaming fails
     */
    public void streamMjpeg(Long channelId, String quality, java.io.OutputStream outputStream) {
        OnvifChannel channel = channelRepository.findById(channelId)
            .orElseThrow(() -> new RuntimeException("通道不存在: " + channelId));

        if (StrUtil.isEmpty(channel.getRtspUrl())) {
            throw new RuntimeException("通道未配置RTSP地址: " + channelId);
        }

        FFmpegFrameGrabber grabber = null;
        Java2DFrameConverter converter = new Java2DFrameConverter();

        try {
            log.info("开始MJPEG流输出: {} - {}", channelId, channel.getRtspUrl());

            grabber = new FFmpegFrameGrabber(channel.getRtspUrl());
            grabber.setOption("rtsp_transport", "tcp");
            grabber.setOption("stimeout", "10000000"); // 10s timeout, in microseconds

            grabber.start();

            while (!Thread.currentThread().isInterrupted()) {
                Frame frame = grabber.grabImage();
                if (frame == null) {
                    Thread.sleep(33); // ~30fps pacing while the source yields nothing
                    continue;
                }

                BufferedImage bufferedImage = converter.convert(frame);
                if (bufferedImage != null) {
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    ImageIO.write(bufferedImage, "jpg", baos);
                    byte[] imageBytes = baos.toByteArray();

                    String boundary = "--frame\r\n";
                    String header = "Content-Type: image/jpeg\r\n" +
                                  "Content-Length: " + imageBytes.length + "\r\n\r\n";

                    // Fix: multipart boundaries/headers are ASCII by spec; don't
                    // depend on the platform default charset (bare getBytes()).
                    outputStream.write(boundary.getBytes(StandardCharsets.US_ASCII));
                    outputStream.write(header.getBytes(StandardCharsets.US_ASCII));
                    outputStream.write(imageBytes);
                    outputStream.write("\r\n".getBytes(StandardCharsets.US_ASCII));
                    outputStream.flush();
                }

                Thread.sleep(33); // ~30fps
            }

        } catch (InterruptedException e) {
            // Fix: interruption is the normal shutdown path, not an error —
            // restore the flag and exit quietly instead of rethrowing.
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            log.error("MJPEG流处理失败: {}", e.getMessage(), e);
            throw new RuntimeException("MJPEG流处理失败: " + e.getMessage(), e);
        } finally {
            if (grabber != null) {
                try {
                    grabber.stop();
                    grabber.release();
                } catch (Exception e) {
                    log.error("释放grabber失败", e);
                }
            }
        }
    }

    /**
     * Stop every active stream session.
     *
     * @return number of sessions that were registered when the call started
     */
    public int stopAllSessions() {
        log.info("停止所有流会话...");

        int stoppedCount = activeSessions.size();

        // Signal each grab loop to exit, then drop all registrations.
        for (StreamSession session : activeSessions.values()) {
            session.stop();
        }
        activeSessions.clear();

        log.info("已停止 {} 个流会话", stoppedCount);
        return stoppedCount;
    }
    
    /**
     * Aggregate streaming statistics: counts, per-session details and
     * currently registered MJPEG stream ids.
     *
     * @return statistics map (JSON-friendly keys/values)
     */
    public Map<String, Object> getStreamingStatistics() {
        Map<String, Object> statistics = new HashMap<>();
        statistics.put("activeSessionCount", activeSessions.size());
        statistics.put("mjpegStreamCount", mjpegEmitters.size());
        statistics.put("totalChannels", channelRepository.count());

        // Per-session details (iteration over ConcurrentHashMap is weakly consistent).
        List<Map<String, Object>> sessionDetails = new ArrayList<>();
        for (Map.Entry<String, StreamSession> entry : activeSessions.entrySet()) {
            StreamSession session = entry.getValue();
            Map<String, Object> detail = new HashMap<>();
            detail.put("sessionId", entry.getKey());
            detail.put("channelId", session.getChannel().getId());
            detail.put("channelName", session.getChannel().getName());
            detail.put("active", session.isActive());
            detail.put("lastFrameTime", session.getLastFrameTime());
            sessionDetails.add(detail);
        }
        statistics.put("activeSessions", sessionDetails);

        // Snapshot of registered MJPEG emitter ids.
        statistics.put("mjpegStreams", new ArrayList<>(mjpegEmitters.keySet()));

        return statistics;
    }
    
    /**
     * Check whether a channel can actually be streamed: it must have an RTSP
     * URL and the stream must yield at least one decodable image frame.
     *
     * @param channelId channel ID
     * @return true if streaming is supported; false on any failure
     */
    public boolean isStreamingSupported(Long channelId) {
        try {
            OnvifChannel channel = channelRepository.findById(channelId)
                .orElseThrow(() -> new RuntimeException("通道不存在: " + channelId));

            String rtspUrl = channel.getRtspUrl();
            if (StrUtil.isEmpty(rtspUrl)) {
                log.warn("通道 {} 未配置RTSP地址", channelId);
                return false;
            }

            // Probe the stream: connect quickly and try to decode a single frame.
            FFmpegFrameGrabber probe = null;
            try {
                probe = new FFmpegFrameGrabber(rtspUrl);
                probe.setOption("rtsp_transport", "tcp");
                probe.setOption("stimeout", "3000000"); // 3s timeout (microseconds)

                probe.start();

                boolean supported = probe.grabImage() != null;
                log.info("通道 {} 流媒体支持检查结果: {}", channelId, supported);
                return supported;
            } finally {
                if (probe != null) {
                    try {
                        probe.stop();
                        probe.release();
                    } catch (Exception e) {
                        log.debug("释放测试grabber时出错", e);
                    }
                }
            }

        } catch (Exception e) {
            log.error("检查通道 {} 流媒体支持时出错: {}", channelId, e.getMessage());
            return false;
        }
    }
     
     /**
      * Collect session and MJPEG-stream information for a single channel.
      *
      * @param channelId channel ID
      * @return map with the channel's sessions, MJPEG stream ids and counts
      */
     public Map<String, Object> getChannelSessions(Long channelId) {
         // Active grab sessions belonging to this channel.
         List<Map<String, Object>> channelSessions = new ArrayList<>();
         for (Map.Entry<String, StreamSession> entry : activeSessions.entrySet()) {
             StreamSession session = entry.getValue();
             if (!session.getChannel().getId().equals(channelId)) {
                 continue;
             }
             Map<String, Object> sessionInfo = new HashMap<>();
             sessionInfo.put("sessionId", entry.getKey());
             sessionInfo.put("channelId", channelId);
             sessionInfo.put("channelName", session.getChannel().getName());
             sessionInfo.put("active", session.isActive());
             sessionInfo.put("lastFrameTime", session.getLastFrameTime());
             sessionInfo.put("rtspUrl", session.getChannel().getRtspUrl());
             channelSessions.add(sessionInfo);
         }

         // MJPEG emitters for this channel; matches both id schemes
         // ("mjpeg_<id>" and "<prefix>_<id>_<millis>").
         List<String> mjpegStreams = new ArrayList<>();
         for (String emitterId : mjpegEmitters.keySet()) {
             if (emitterId.contains("_" + channelId + "_") || emitterId.equals("mjpeg_" + channelId)) {
                 mjpegStreams.add(emitterId);
             }
         }

         Map<String, Object> result = new HashMap<>();
         result.put("channelId", channelId);
         result.put("sessionCount", channelSessions.size());
         result.put("sessions", channelSessions);
         result.put("mjpegStreamCount", mjpegStreams.size());
         result.put("mjpegStreams", mjpegStreams);
         return result;
     }
     
     /**
      * Build the set of playback and management URLs for a channel.
      *
      * @param channelId channel ID
      * @param quality   requested quality, echoed into the query strings
      * @return URL map plus channel info and a live support-check result
      * @throws RuntimeException if the channel does not exist
      */
     public Map<String, Object> getStreamUrls(Long channelId, String quality) {
         OnvifChannel channel = channelRepository.findById(channelId)
             .orElseThrow(() -> new RuntimeException("通道不存在: " + channelId));

         String base = "/api/streaming/channels/" + channelId;

         Map<String, Object> urls = new HashMap<>();
         urls.put("mjpegUrl", base + "/mjpeg?quality=" + quality);
         urls.put("sseUrl", base + "/sse?quality=" + quality);
         urls.put("snapshotUrl", base + "/snapshot");
         urls.put("rtspUrl", channel.getRtspUrl());
         urls.put("supportCheckUrl", base + "/support");
         urls.put("sessionsUrl", base + "/sessions");
         urls.put("channelId", channelId);
         urls.put("channelName", channel.getName());
         urls.put("quality", quality);

         // Live availability probe; a probe failure must not break URL building.
         try {
             boolean supported = isStreamingSupported(channelId);
             urls.put("supported", supported);
             urls.put("status", supported ? "available" : "unavailable");
         } catch (Exception e) {
             urls.put("supported", false);
             urls.put("status", "error");
             urls.put("error", e.getMessage());
         }

         return urls;
      }
      
      /**
       * Snapshot of all active sessions and registered MJPEG streams.
       *
       * @return aggregate session/stream details plus a timestamp
       */
      public Map<String, Object> getActiveSessions() {
          List<Map<String, Object>> sessionDetails = new ArrayList<>();
          for (Map.Entry<String, StreamSession> entry : activeSessions.entrySet()) {
              StreamSession session = entry.getValue();
              Map<String, Object> detail = new HashMap<>();
              detail.put("sessionId", entry.getKey());
              detail.put("channelId", session.getChannel().getId());
              detail.put("channelName", session.getChannel().getName());
              // NOTE(review): assumes channel.getDevice() is non-null and loaded
              // (possible NPE / lazy-load issue otherwise) — confirm entity mapping.
              detail.put("deviceId", session.getChannel().getDevice().getId());
              detail.put("deviceName", session.getChannel().getDevice().getName());
              detail.put("active", session.isActive());
              detail.put("lastFrameTime", session.getLastFrameTime());
              detail.put("rtspUrl", session.getChannel().getRtspUrl());
              sessionDetails.add(detail);
          }

          Map<String, Object> result = new HashMap<>();
          result.put("totalSessions", activeSessions.size());
          result.put("sessions", sessionDetails);
          result.put("mjpegStreamCount", mjpegEmitters.size());
          result.put("mjpegStreams", new ArrayList<>(mjpegEmitters.keySet()));
          result.put("timestamp", System.currentTimeMillis());
          return result;
      }

    /**
     * Shut everything down: stop every stream session and close every MJPEG emitter.
     */
    public void cleanup() {
        log.info("清理所有流会话...");

        // Signal all grab loops to exit and drop the registrations.
        for (StreamSession session : activeSessions.values()) {
            session.stop();
        }
        activeSessions.clear();

        // Best-effort close of each SSE emitter; it may already be finished.
        for (SseEmitter emitter : mjpegEmitters.values()) {
            try {
                emitter.complete();
            } catch (Exception ignored) {
                // already completed/errored — nothing to do
            }
        }
        mjpegEmitters.clear();
    }

    /**
     * Internal handle for one running stream: the owning channel, the worker
     * {@link Future} and simple liveness state. Cross-thread visibility of the
     * mutable state is provided by volatile fields.
     */
    private static class StreamSession {
        private final String sessionId;
        private final OnvifChannel channel;
        private volatile boolean active = false;   // toggled by the grab loop
        private volatile long lastFrameTime = 0;   // epoch millis of the last frame
        private Future<?> future;                  // worker task handle

        public StreamSession(String sessionId, OnvifChannel channel) {
            this.sessionId = sessionId;
            this.channel = channel;
        }

        /** Ask the grab loop to exit and interrupt the worker if it is still running. */
        public void stop() {
            this.active = false;
            Future<?> task = this.future;
            if (task != null && !task.isDone()) {
                task.cancel(true); // interrupt so blocking grabs/sleeps abort
            }
        }

        // Accessors
        public String getSessionId() { return sessionId; }
        public OnvifChannel getChannel() { return channel; }
        public boolean isActive() { return active; }
        public void setActive(boolean active) { this.active = active; }
        public long getLastFrameTime() { return lastFrameTime; }
        public void setLastFrameTime(long lastFrameTime) { this.lastFrameTime = lastFrameTime; }
        public Future<?> getFuture() { return future; }
        public void setFuture(Future<?> future) { this.future = future; }
    }
}