package com.example.icar.rosmaster;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Camera client for Rosmaster robots: pulls an MJPEG (or single-image) HTTP
 * stream, decodes frames off the main thread, and delivers bitmaps to a
 * {@link RosmasterCamera.CameraCallback} on the Android main thread.
 */
public class RosmasterCamera {
    private static final String TAG = "RosmasterCamera";
    
    // Camera source types (part of the public API; not referenced inside this class).
    public static final int TYPE_LOCAL_CAMERA = 0;
    public static final int TYPE_DEPTH_CAMERA = 1;
    
    // Reconnect policy: up to 10 attempts, 1 second apart.
    private static final int MAX_RETRY_COUNT = 10; // maximum reconnect attempts
    private static final int RETRY_DELAY_MS = 1000; // pause between attempts
    
    // Frame-rate tuning. NOTE(review): these are MUTABLE statics rewritten at
    // runtime by startCamera() and decodeAndProcessImage(), so they are shared
    // across all instances — confirm single-instance use is intended.
    private static long MIN_FRAME_INTERVAL = 100; // minimum ms between decoded frames (~10fps)
    private static int SAMPLE_SIZE = 4; // BitmapFactory inSampleSize (decode at 1/4 resolution)
    
    // Buffer sizing for stream parsing
    private static final int BUFFER_SIZE = 262144; // 256KB read buffer
    private static final int PUSHBACK_SIZE = 8192; // 8KB pushback window
    
    // Worker threads: network reads run on the cached pool, JPEG decoding on a
    // dedicated single thread; results are posted to the UI thread via mainHandler.
    private ExecutorService executorService;
    private ExecutorService decodeExecutor; // single decode thread avoids concurrent bitmap allocation
    private Handler mainHandler;
    
    private AtomicBoolean isStreaming = new AtomicBoolean(false);
    private CameraCallback callback; // frame/error sink; invoked on the main thread
    private int retryCount = 0; // consecutive failed connection attempts
    
    /**
     * Creates the camera client and its worker executors. No network activity
     * happens until {@code startStreaming()} or {@code startCamera()} is called.
     */
    public RosmasterCamera() {
        executorService = Executors.newCachedThreadPool();
        decodeExecutor = Executors.newSingleThreadExecutor(); // single-thread decoder to avoid memory contention
        mainHandler = new Handler(Looper.getMainLooper());
    }
    
    /**
     * Starts pulling the camera's MJPEG stream from
     * {@code http://ip:port/?action=stream}.
     *
     * @param ipAddress camera IP address
     * @param port camera HTTP port
     * @param callback receiver for decoded frames and errors
     */
    public void startStreaming(String ipAddress, int port, CameraCallback callback) {
        // FIX: atomically transition idle -> streaming. The previous
        // get()-then-set() pair was a check-then-act race: two callers could
        // both pass the check and start duplicate reader threads.
        if (!isStreaming.compareAndSet(false, true)) {
            Log.d(TAG, "摄像头已经在流式传输中");
            return;
        }
        
        this.callback = callback;
        retryCount = 0;
        
        // Build the MJPEG stream URL
        String streamUrl = "http://" + ipAddress + ":" + port + "/?action=stream";
        Log.d(TAG, "开始从 " + streamUrl + " 获取摄像头流");
        
        startStreamingWithRetry(streamUrl);
    }
    
    /**
     * Connects to the stream URL on a background thread and keeps reconnecting
     * while streaming is active. Each failed or finished connection is followed
     * by a RETRY_DELAY_MS pause; {@code handleStreamingError()} enforces the
     * MAX_RETRY_COUNT budget and eventually stops streaming.
     *
     * @param streamUrl HTTP URL of the camera stream
     */
    private void startStreamingWithRetry(String streamUrl) {
        executorService.execute(() -> {
            while (isStreaming.get()) {
                HttpURLConnection connection = null;
                InputStream inputStream = null;
                
                try {
                    Log.d(TAG, "尝试连接摄像头流: " + streamUrl + " (尝试 " + (retryCount + 1) + "/" + MAX_RETRY_COUNT + ")");
                    URL url = new URL(streamUrl);
                    connection = (HttpURLConnection) url.openConnection();
                    connection.setConnectTimeout(20000);  // generous 20s timeouts for slow links
                    connection.setReadTimeout(20000);
                    connection.setRequestProperty("Connection", "close"); // no keep-alive between attempts
                    connection.setRequestProperty("User-Agent", "RosmasterApp/1.0");
                    
                    int responseCode = connection.getResponseCode();
                    Log.d(TAG, "摄像头连接响应码: " + responseCode);
                    
                    if (responseCode == HttpURLConnection.HTTP_OK) {
                        // Connection succeeded: reset the retry budget
                        retryCount = 0;
                        
                        String contentType = connection.getContentType();
                        Log.d(TAG, "摄像头内容类型: " + contentType);
                        
                        inputStream = connection.getInputStream();
                        
                        // multipart/x-mixed-replace is MJPEG framing; anything else
                        // is treated as a single still image
                        if (contentType != null && contentType.contains("multipart/x-mixed-replace")) {
                            handleMjpegStream(inputStream);
                        } else {
                            handleSingleImage(inputStream);
                        }
                    } else {
                        Log.e(TAG, "摄像头连接失败，响应码: " + responseCode);
                        handleStreamingError("HTTP错误: " + responseCode);
                    }
                } catch (java.net.SocketTimeoutException e) {
                    Log.e(TAG, "摄像头连接超时: " + e.toString());
                    handleStreamingError("连接超时: " + e.getMessage());
                } catch (java.net.ConnectException e) {
                    Log.e(TAG, "摄像头连接被拒绝: " + e.toString());
                    handleStreamingError("连接被拒绝: " + e.getMessage());
                } catch (IOException e) {
                    Log.e(TAG, "摄像头流IO错误: " + e.toString());
                    handleStreamingError("IO错误: " + e.getMessage());
                } catch (InterruptedException e) {
                    // FIX: restore the interrupt flag so shutdownNow() on the
                    // executor actually stops this worker (it was swallowed before).
                    Thread.currentThread().interrupt();
                    Log.e(TAG, "摄像头流中断: " + e.toString());
                    handleStreamingError("线程中断");
                } catch (Exception e) {
                    Log.e(TAG, "摄像头流未知错误: " + e.toString());
                    handleStreamingError("未知错误: " + e.getMessage());
                } finally {
                    if (inputStream != null) {
                        try {
                            inputStream.close();
                        } catch (IOException e) {
                            Log.e(TAG, "关闭输入流错误: " + e.toString());
                        }
                    }
                    if (connection != null) {
                        connection.disconnect();
                    }
                }
                
                // Stop looping once streaming was switched off or we were interrupted.
                if (!isStreaming.get() || Thread.currentThread().isInterrupted()) {
                    break;
                }
                
                // Pause before reconnecting (covers both the error path and a
                // normal end-of-stream).
                try {
                    Thread.sleep(RETRY_DELAY_MS);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // FIX: preserve interrupt status
                    Log.e(TAG, "重试等待被中断: " + e.toString());
                    break;
                }
            }
        });
    }
    
    /**
     * Parses a multipart/x-mixed-replace (MJPEG) stream: skips to the first
     * part boundary, then repeatedly reads a part header, extracts its
     * Content-Length, reads that many JPEG bytes and hands the frame to the
     * decode executor. Rate limiting keeps only every 3rd frame and enforces
     * MIN_FRAME_INTERVAL between decoded frames.
     *
     * @param inputStream raw HTTP body of the MJPEG response
     * @throws IOException on stream failure
     * @throws InterruptedException declared for the caller's catch clause
     */
    private void handleMjpegStream(InputStream inputStream) throws IOException, InterruptedException {
        PushbackInputStream pushbackStream = new PushbackInputStream(inputStream, PUSHBACK_SIZE);
        // FIX: removed unused locals (a 256KB byte[] buffer and bytesRead) that
        // were allocated but never read or written.
        
        // Frame-rate control state
        long lastFrameTime = 0;
        int frameCount = 0;
        
        // Locate the first boundary so the loop below always starts at a part header
        byte[] boundaryBytes = findBoundaryMarker(pushbackStream);
        if (boundaryBytes == null) {
            Log.e(TAG, "无法找到MJPEG流边界");
            return;
        }
        
        String boundary = new String(boundaryBytes);
        Log.d(TAG, "找到MJPEG边界: " + boundary);
        
        while (isStreaming.get()) {
            try {
                // Read the part's HTTP-style header block (terminated by CRLFCRLF)
                ByteArrayOutputStream headerBuffer = new ByteArrayOutputStream();
                byte[] headerBytes = readUntil(pushbackStream, "\r\n\r\n".getBytes());
                if (headerBytes == null) {
                    Log.e(TAG, "无法读取HTTP头部");
                    break;
                }
                
                headerBuffer.write(headerBytes);
                headerBuffer.write("\r\n\r\n".getBytes());
                
                // Parse the content length from the part header
                String headerString = headerBuffer.toString();
                int contentLength = parseContentLength(headerString);
                
                if (contentLength <= 0) {
                    Log.e(TAG, "无效的内容长度: " + contentLength);
                    // Resynchronize at the next boundary and try again
                    findBoundaryMarker(pushbackStream);
                    continue;
                }
                
                // Read exactly contentLength bytes of JPEG data
                byte[] imageData = new byte[contentLength];
                int totalBytesRead = 0;
                int readLength;
                
                while (totalBytesRead < contentLength && (readLength = pushbackStream.read(imageData, totalBytesRead, contentLength - totalBytesRead)) != -1) {
                    totalBytesRead += readLength;
                }
                
                if (totalBytesRead < contentLength) {
                    Log.e(TAG, "图像数据不完整: " + totalBytesRead + "/" + contentLength);
                    continue;
                }
                
                // Frame-rate control: keep every 3rd frame, at most one per MIN_FRAME_INTERVAL
                long currentTime = System.currentTimeMillis();
                frameCount++;
                
                if (frameCount % 3 == 0 && currentTime - lastFrameTime >= MIN_FRAME_INTERVAL) {
                    // Timestamp is updated here on the reader thread so decode
                    // latency cannot skew the rate limit.
                    lastFrameTime = currentTime;
                    
                    // imageData is effectively final, so it can be captured
                    // directly (the extra finalImageData copy was redundant).
                    decodeExecutor.execute(() -> {
                        try {
                            decodeAndProcessImage(imageData);
                        } catch (Exception e) {
                            Log.e(TAG, "图像解码错误: " + e.getMessage());
                        }
                    });
                }
                
                // Advance to the next part boundary
                byte[] boundaryMarker = findBoundaryMarker(pushbackStream);
                if (boundaryMarker == null) {
                    Log.e(TAG, "无法找到下一个边界");
                    break;
                }
            } catch (Exception e) {
                Log.e(TAG, "处理MJPEG帧错误: " + e.getMessage());
                // Attempt recovery by resynchronizing at the next boundary
                try {
                    findBoundaryMarker(pushbackStream);
                } catch (Exception ex) {
                    Log.e(TAG, "恢复流失败: " + ex.getMessage());
                    break;
                }
            }
        }
    }
    
    /**
     * Scans forward to the next MJPEG part boundary ("--…") and consumes it
     * together with its terminating CRLF.
     *
     * FIX: the previous implementation recursed on itself after unread()ing a
     * full pushback buffer, which re-read the exact same bytes and recursed
     * forever (stack overflow) whenever a boundary line did not complete
     * within one buffer; it could also miss a "--" split across two reads.
     * This version scans byte-by-byte, which can neither lose markers nor loop.
     *
     * @param stream stream positioned anywhere at or before a boundary
     * @return the boundary bytes from "--" up to (excluding) CRLF, or null on
     *         end of stream or an implausibly long boundary line
     */
    private byte[] findBoundaryMarker(PushbackInputStream stream) throws IOException {
        int b;
        
        // Phase 1: find two consecutive dashes.
        int dashes = 0;
        while (dashes < 2) {
            b = stream.read();
            if (b == -1) {
                return null;
            }
            dashes = (b == '-') ? dashes + 1 : 0;
        }
        
        // Phase 2: collect the boundary text up to (but not including) CRLF.
        ByteArrayOutputStream boundary = new ByteArrayOutputStream();
        boundary.write('-');
        boundary.write('-');
        while ((b = stream.read()) != -1) {
            if (b == '\r') {
                int next = stream.read();
                if (next == '\n') {
                    return boundary.toByteArray(); // CRLF consumed, not returned
                }
                boundary.write(b);
                if (next == -1) {
                    return null;
                }
                stream.unread(next); // lone CR: keep scanning from the next byte
            } else {
                boundary.write(b);
            }
            // Guard against corrupt streams that never produce a CRLF.
            if (boundary.size() > PUSHBACK_SIZE) {
                return null;
            }
        }
        
        return null;
    }
    
    /**
     * Reads bytes from the stream until the given end sequence is seen.
     *
     * @param stream source stream
     * @param endSequence terminator to search for (e.g. CRLFCRLF)
     * @return the bytes read before the terminator (terminator excluded), or
     *         null if the stream ends before the terminator appears
     */
    private byte[] readUntil(PushbackInputStream stream, byte[] endSequence) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        int endIndex = 0;
        int b;
        
        while ((b = stream.read()) != -1) {
            buffer.write(b);
            
            if (b == endSequence[endIndex]) {
                endIndex++;
                if (endIndex == endSequence.length) {
                    // Terminator found: return everything before it
                    byte[] result = buffer.toByteArray();
                    byte[] trimmedResult = new byte[result.length - endSequence.length];
                    System.arraycopy(result, 0, trimmedResult, 0, trimmedResult.length);
                    return trimmedResult;
                }
            } else {
                // FIX: on a mismatch, re-test the current byte against the
                // first element instead of resetting unconditionally — the old
                // code never matched "\r\n\r\n" in input like "\r\r\n\r\n"
                // because the restarting '\r' was dropped. (This simple restart
                // is exact for CRLFCRLF, whose prefixes have no border longer
                // than one byte.)
                endIndex = (b == endSequence[0]) ? 1 : 0;
            }
        }
        
        return null;
    }
    
    /**
     * Extracts the Content-Length value from a part's header block.
     *
     * FIX: HTTP header names are case-insensitive, so the lookup now matches
     * "content-length:" in any casing; and when the header block lacks a
     * trailing CRLF the value is taken up to the end of the string instead of
     * being rejected.
     *
     * @param header raw header text of one multipart part
     * @return the parsed length, or -1 when absent or malformed
     */
    private int parseContentLength(String header) {
        String contentLengthMarker = "content-length:";
        // Locale.ROOT keeps ASCII lowercasing locale-independent, so indexes
        // into the original string stay aligned.
        int contentLengthIndex = header.toLowerCase(java.util.Locale.ROOT).indexOf(contentLengthMarker);
        
        if (contentLengthIndex == -1) {
            return -1;
        }
        
        int startIndex = contentLengthIndex + contentLengthMarker.length();
        int endIndex = header.indexOf("\r\n", startIndex);
        if (endIndex == -1) {
            endIndex = header.length(); // last header line may lack CRLF
        }
        
        String lengthStr = header.substring(startIndex, endIndex).trim();
        try {
            return Integer.parseInt(lengthStr);
        } catch (NumberFormatException e) {
            return -1;
        }
    }
    
    /**
     * Decodes a JPEG byte array (on the decode thread) and posts the resulting
     * bitmap to the callback on the main thread. Frames arriving after
     * streaming has stopped are recycled instead of delivered.
     *
     * @param imageData complete JPEG-encoded frame
     */
    private void decodeAndProcessImage(byte[] imageData) {
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inSampleSize = SAMPLE_SIZE;
        options.inPreferredConfig = Bitmap.Config.RGB_565; // 16-bit color halves memory per frame
        options.inDither = true; // dithering softens RGB_565 color banding
        
        try {
            final Bitmap bitmap = BitmapFactory.decodeByteArray(imageData, 0, imageData.length, options);
            
            if (bitmap != null && isStreaming.get() && callback != null) {
                // Deliver on the main thread; state is re-checked there because
                // streaming may stop between post() and execution.
                mainHandler.post(() -> {
                    if (isStreaming.get() && callback != null) {
                        callback.onFrameReceived(bitmap);
                    } else {
                        bitmap.recycle(); // nobody will draw it — free the pixels now
                    }
                });
            }
        } catch (OutOfMemoryError e) {
            Log.e(TAG, "解码图像内存不足: " + e.toString());
            // Degrade resolution to survive memory pressure (capped at 1/8).
            // NOTE(review): SAMPLE_SIZE is a shared mutable static written here
            // without synchronization — confirm this cross-instance effect is intended.
            SAMPLE_SIZE = Math.min(SAMPLE_SIZE * 2, 8);
            Log.d(TAG, "增加采样率到: " + SAMPLE_SIZE);
        } catch (Exception e) {
            Log.e(TAG, "解码图像失败: " + e.toString());
        }
    }
    
    /**
     * Reads the entire response body as one image and hands the bytes to the
     * decode executor.
     *
     * @param inputStream source of the image bytes
     * @throws IOException on read failure
     */
    private void handleSingleImage(InputStream inputStream) throws IOException {
        ByteArrayOutputStream collected = new ByteArrayOutputStream();
        byte[] chunk = new byte[16384]; // 16KB read chunk
        
        // Drain the stream to EOF
        for (int n = inputStream.read(chunk); n != -1; n = inputStream.read(chunk)) {
            collected.write(chunk, 0, n);
        }
        
        // Decode off this thread
        final byte[] imageData = collected.toByteArray();
        decodeExecutor.execute(() -> decodeAndProcessImage(imageData));
    }
    
    /**
     * Records one failed connection attempt. While the retry budget lasts it
     * only logs; once MAX_RETRY_COUNT is exceeded it reports the error to the
     * callback on the main thread and stops streaming.
     *
     * @param errorMessage human-readable description of the failure
     */
    private void handleStreamingError(String errorMessage) {
        if (!isStreaming.get()) {
            return;
        }
        
        retryCount++;
        
        if (retryCount <= MAX_RETRY_COUNT) {
            Log.w(TAG, "摄像头连接失败，尝试重试 " + retryCount + "/" + MAX_RETRY_COUNT);
            // FIX: the Thread.sleep(RETRY_DELAY_MS) that used to live here was
            // redundant — the reconnect loop in startStreamingWithRetry already
            // sleeps RETRY_DELAY_MS before the next attempt, so the effective
            // delay was doubled. The wait (and its swallowed
            // InterruptedException) has been removed.
        } else {
            Log.e(TAG, "摄像头连接失败，超过最大重试次数");
            final String finalErrorMessage = errorMessage;
            mainHandler.post(() -> {
                if (callback != null) {
                    callback.onError(finalErrorMessage + "（已重试" + MAX_RETRY_COUNT + "次）");
                }
            });
            stopStreaming();
        }
    }
    
    /**
     * Stops streaming. In-flight read/decode work notices the cleared flag and
     * winds down on its own; the executors stay alive so streaming can be
     * restarted on the same instance (use {@code release()} for final teardown).
     */
    public void stopStreaming() {
        Log.d(TAG, "停止摄像头流式传输");
        isStreaming.set(false);
    }
    
    /**
     * Starts streaming from an arbitrary camera URL. RTSP URLs get lower
     * frame interval / higher resolution settings and a dedicated path;
     * everything else goes through the standard HTTP retry pipeline.
     *
     * @param cameraUrl camera stream URL (http:// or rtsp://)
     * @param callback receiver for decoded frames and errors
     */
    public void startCamera(String cameraUrl, CameraCallback callback) {
        // FIX: atomically transition idle -> streaming; the previous
        // get()-then-set() pair was a check-then-act race (same fix as
        // startStreaming, kept consistent).
        if (!isStreaming.compareAndSet(false, true)) {
            Log.d(TAG, "相机已经在运行中");
            return;
        }
        
        this.callback = callback;
        retryCount = 0;
        
        Log.d(TAG, "开始从 " + cameraUrl + " 获取相机流");
        
        // RTSP streams get special handling
        boolean isRtsp = cameraUrl.toLowerCase().startsWith("rtsp://");
        
        if (isRtsp) {
            // Lower the frame interval for responsiveness and raise resolution
            MIN_FRAME_INTERVAL = 50; // ~20fps
            SAMPLE_SIZE = 2;         // 1/2 resolution
            
            startRtspStreaming(cameraUrl);
        } else {
            // Plain HTTP stream
            startStreamingWithRetry(cameraUrl);
        }
    }
    
    /**
     * Attempts to read an "RTSP" URL over a plain HTTP connection, treating
     * each read() chunk as a JPEG frame.
     *
     * NOTE(review): HttpURLConnection does not speak the RTSP protocol, and
     * the loop below decodes whatever bytes a single read() returns — chunks
     * are not aligned to frame boundaries, so decodes will generally fail
     * unless the server actually serves HTTP/JPEG on this URL. Confirm against
     * a real device; a MediaPlayer/ExoPlayer-based path may be required.
     *
     * @param rtspUrl RTSP URL
     */
    private void startRtspStreaming(String rtspUrl) {
        Log.d(TAG, "开始RTSP流处理: " + rtspUrl);
        
        executorService.execute(() -> {
            try {
                // Try opening the RTSP URL as an ordinary HTTP connection
                URL url = new URL(rtspUrl);
                HttpURLConnection connection = (HttpURLConnection) url.openConnection();
                connection.setConnectTimeout(10000);  // 10s connect timeout
                connection.setReadTimeout(5000);      // 5s read timeout
                connection.setRequestProperty("Connection", "close");
                connection.setRequestProperty("User-Agent", "RosmasterApp/1.0 RTSP Client");
                
                try {
                    int responseCode = connection.getResponseCode();
                    Log.d(TAG, "RTSP连接响应码: " + responseCode);
                    
                    if (responseCode == HttpURLConnection.HTTP_OK) {
                        // Connected: reset the retry budget
                        retryCount = 0;
                        
                        InputStream inputStream = connection.getInputStream();
                        PushbackInputStream pushbackStream = new PushbackInputStream(inputStream, PUSHBACK_SIZE);
                        
                        // Large read buffer; lower frame interval than the MJPEG path
                        byte[] buffer = new byte[BUFFER_SIZE];
                        int bytesRead;
                        long lastFrameTime = 0;
                        int frameCount = 0;
                        
                        while (isStreaming.get() && (bytesRead = pushbackStream.read(buffer)) != -1) {
                            // Each chunk is treated as one frame — no MJPEG parsing here
                            long currentTime = System.currentTimeMillis();
                            
                            // Keep every 2nd chunk, at least 30ms apart
                            frameCount++;
                            if (frameCount % 2 == 0 && currentTime - lastFrameTime >= 30) {
                                // Timestamp updated on the reader thread so decode
                                // latency does not skew the rate limit
                                lastFrameTime = currentTime;
                                
                                // Copy out of the shared read buffer before it is overwritten
                                final byte[] frameData = new byte[bytesRead];
                                System.arraycopy(buffer, 0, frameData, 0, bytesRead);
                                
                                // Decode off the reader thread
                                decodeExecutor.execute(() -> {
                                    BitmapFactory.Options options = new BitmapFactory.Options();
                                    options.inSampleSize = SAMPLE_SIZE;
                                    options.inPreferredConfig = Bitmap.Config.RGB_565;
                                    
                                    try {
                                        final Bitmap bitmap = BitmapFactory.decodeByteArray(frameData, 0, frameData.length, options);
                                        if (bitmap != null && isStreaming.get()) {
                                            mainHandler.post(() -> {
                                                if (isStreaming.get() && callback != null) {
                                                    callback.onFrameReceived(bitmap);
                                                } else {
                                                    bitmap.recycle();
                                                }
                                            });
                                        }
                                    } catch (OutOfMemoryError e) {
                                        Log.e(TAG, "解码RTSP帧内存不足: " + e.toString());
                                        // Degrade resolution to reduce memory use
                                        SAMPLE_SIZE = Math.min(SAMPLE_SIZE * 2, 8);
                                    } catch (Exception e) {
                                        Log.e(TAG, "解码RTSP帧失败: " + e.toString());
                                    }
                                });
                            }
                            
                            // Brief sleep (rather than yield) so the loop cannot spin the CPU
                            Thread.sleep(1);
                        }
                        
                        inputStream.close();
                    } else {
                        Log.e(TAG, "RTSP连接失败，响应码: " + responseCode);
                        handleStreamingError("RTSP错误: " + responseCode);
                    }
                } catch (Exception e) {
                    Log.e(TAG, "RTSP流处理错误: " + e.toString());
                    
                    // Fall back to the alternative RTSP handling path
                    tryAlternativeRtspMethod(rtspUrl);
                }
            } catch (Exception e) {
                Log.e(TAG, "RTSP流初始化错误: " + e.toString());
                handleStreamingError("RTSP错误: " + e.getMessage());
            }
        });
    }
    
    /**
     * Fallback for RTSP sources: rewrites the scheme to http:// and feeds the
     * resulting URL through the ordinary HTTP retry pipeline.
     *
     * @param rtspUrl original RTSP URL
     */
    private void tryAlternativeRtspMethod(String rtspUrl) {
        Log.d(TAG, "尝试备用方法处理RTSP流: " + rtspUrl);
        
        try {
            // Some camera servers expose the same stream over HTTP
            final String httpUrl = rtspUrl.replace("rtsp://", "http://");
            Log.d(TAG, "尝试通过HTTP获取RTSP流: " + httpUrl);
            startStreamingWithRetry(httpUrl);
        } catch (Exception e) {
            Log.e(TAG, "备用RTSP方法失败: " + e.toString());
            handleStreamingError("RTSP备用方法错误: " + e.getMessage());
        }
    }
    
    /**
     * Stops the camera. Alias for {@link #stopStreaming()}, kept as the
     * counterpart of {@code startCamera()}.
     */
    public void stopCamera() {
        stopStreaming();
    }
    
    /**
     * Stops streaming and shuts down both executors. After release() the
     * instance can no longer start new streams.
     */
    public void release() {
        Log.d(TAG, "释放摄像头资源");
        stopStreaming();
        
        // FIX: the two shutdown sequences were duplicated and inconsistent —
        // the decode-executor path dropped the thread's interrupt status on
        // InterruptedException while the other path restored it. Both now go
        // through one helper that always restores it.
        shutdownExecutor(decodeExecutor);
        shutdownExecutor(executorService);
    }
    
    /**
     * Gracefully shuts down an executor, forcing termination if it does not
     * finish within 2 seconds.
     */
    private void shutdownExecutor(ExecutorService executor) {
        if (executor == null) {
            return;
        }
        executor.shutdown();
        try {
            if (!executor.awaitTermination(2, TimeUnit.SECONDS)) {
                executor.shutdownNow();
            }
        } catch (InterruptedException e) {
            executor.shutdownNow();
            Thread.currentThread().interrupt(); // preserve interrupt status
        }
    }
    
    /**
     * Receiver for decoded camera frames and streaming errors. Both methods
     * are invoked on the Android main thread (posted via mainHandler).
     */
    public interface CameraCallback {
        /**
         * Called for each decoded frame.
         * @param bitmap the decoded frame image (receiver owns it from here on)
         */
        void onFrameReceived(Bitmap bitmap);
        
        /**
         * Called when streaming fails permanently (retry budget exhausted).
         * @param error human-readable error description
         */
        void onError(String error);
    }
} 