/*
 *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.yl.WebSocket;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.SystemClock;
import android.util.Log;
import android.os.Environment;
import com.camera.base.inerface.FramePusher;
import com.camera.base.inerface.ICPInterface;
import com.camera.base.inerface.Pusher;
import com.camera.base.inerface.VideoFrameBufferType;
import android.graphics.YuvImage;
import org.webrtc.CapturerObserver;
import org.webrtc.JavaI420Buffer;
import org.webrtc.Logging;
import org.webrtc.NV21Buffer;

import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;

public class WebrtcVideoCapturer implements VideoCapturer {
  /** Minimal frame source: yields frames until {@link #close()} is called. */
  private interface VideoReader {
    VideoFrame getNextFrame();
    void close();
  }

  /**
   * Read video data from file for the .y4m container. The reader loops back
   * to the first frame when the end of the file is reached.
   */
  @SuppressWarnings("StringSplitter")
  private static class VideoReaderY4M implements VideoReader {
    private static final String TAG = "VideoReaderY4M";
    private static final String Y4M_FRAME_DELIMETER = "FRAME";
    // "FRAME" plus the trailing newline.
    private static final int FRAME_DELIMETER_LENGTH = Y4M_FRAME_DELIMETER.length() + 1;

    private final int frameWidth;
    private final int frameHeight;
    // File offset of the first byte after the stream header.
    private final long videoStart;
    private final RandomAccessFile mediaFile;
    private final FileChannel mediaFileChannel;

    /**
     * Opens {@code file} and parses its YUV4MPEG2 stream header.
     *
     * @throws IOException if the file cannot be opened
     * @throws RuntimeException if EOF is hit before the header terminator
     * @throws IllegalArgumentException on unsupported color space or odd dimensions
     */
    public VideoReaderY4M(String file) throws IOException {
      mediaFile = new RandomAccessFile(file, "r");
      mediaFileChannel = mediaFile.getChannel();
      StringBuilder builder = new StringBuilder();
      for (;;) {
        int c = mediaFile.read();
        if (c == -1) {
          // End of file reached before the header terminator.
          throw new RuntimeException("Found end of file before end of header for file: " + file);
        }
        if (c == '\n') {
          // End of header found.
          break;
        }
        builder.append((char) c);
      }
      videoStart = mediaFileChannel.position();
      String header = builder.toString();
      String[] headerTokens = header.split("[ ]");
      int w = 0;
      int h = 0;
      String colorSpace = "";
      // Y4M header parameters are single-letter-prefixed tokens, e.g. "W640 H480 C420".
      for (String tok : headerTokens) {
        char c = tok.charAt(0);
        switch (c) {
          case 'W':
            w = Integer.parseInt(tok.substring(1));
            break;
          case 'H':
            h = Integer.parseInt(tok.substring(1));
            break;
          case 'C':
            colorSpace = tok.substring(1);
            break;
        }
      }
      Logging.d(TAG, "Color space: " + colorSpace);
      if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
        throw new IllegalArgumentException(
            "Does not support any other color space than I420 or I420mpeg2");
      }
      if ((w % 2) == 1 || (h % 2) == 1) {
        throw new IllegalArgumentException("Does not support odd width or height");
      }
      frameWidth = w;
      frameHeight = h;
      Logging.d(TAG, "frame dim: (" + w + ", " + h + ")");
    }

    /**
     * Reads the next I420 frame, rewinding to the first frame at end of file.
     * The returned frame must be released by the caller.
     */
    @Override
    public VideoFrame getNextFrame() {
      final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
      final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
      final ByteBuffer dataY = buffer.getDataY();
      final ByteBuffer dataU = buffer.getDataU();
      final ByteBuffer dataV = buffer.getDataV();

      try {
        ByteBuffer frameDelim = ByteBuffer.allocate(FRAME_DELIMETER_LENGTH);
        if (fillFromChannel(frameDelim) < FRAME_DELIMETER_LENGTH) {
          // We reached end of file: rewind to the first frame and loop.
          mediaFileChannel.position(videoStart);
          // Reset the buffer so the retry does not append to a partial read.
          frameDelim.clear();
          if (fillFromChannel(frameDelim) < FRAME_DELIMETER_LENGTH) {
            throw new RuntimeException("Error looping video");
          }
        }
        String frameDelimStr = new String(frameDelim.array(), Charset.forName("US-ASCII"));
        if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
          throw new RuntimeException(
              "Frames should be delimited by FRAME plus newline, found delimiter was: '"
              + frameDelimStr + "'");
        }

        // FileChannel.read() may return fewer bytes than requested, so read
        // in a loop until each plane buffer is full (short only at EOF).
        fillFromChannel(dataY);
        fillFromChannel(dataU);
        fillFromChannel(dataV);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }

      return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs);
    }

    /**
     * Reads from the media channel until {@code buffer} is full or EOF.
     *
     * @return total bytes read; smaller than the buffer capacity only at EOF
     */
    private int fillFromChannel(ByteBuffer buffer) throws IOException {
      int total = 0;
      while (buffer.hasRemaining()) {
        int n = mediaFileChannel.read(buffer);
        if (n == -1) {
          break;
        }
        total += n;
      }
      return total;
    }

    @Override
    public void close() {
      try {
        // Closing a file also closes the channel.
        mediaFile.close();
      } catch (IOException e) {
        Logging.e(TAG, "Problem closing file", e);
      }
    }
  }

  private final static String TAG = "WebrtcVideoCapturer";
  private VideoReader videoReader;
  private CapturerObserver capturerObserver;
  private final Timer timer = new Timer();
  private ICPInterface mICPInterface;

  // Drives file playback via tick(); scheduling is currently disabled in startCapture().
  private final TimerTask tickTask = new TimerTask() {
    @Override
    public void run() {
      tick();
    }
  };
  // Whether this capturer is expected to deliver video. Currently only stored.
  private boolean haveVideo = true;

  /**
   * Creates a capturer that plays back frames from a .y4m file.
   *
   * @throws IOException if the file cannot be opened or its header parsed
   */
  public WebrtcVideoCapturer(String inputFile) throws IOException {
    try {
      videoReader = new VideoReaderY4M(inputFile);
    } catch (IOException e) {
      Logging.d(TAG, "Could not open video file: " + inputFile);
      throw e;
    }
  }

  /** Creates a capturer fed by the ICP camera subcode stream. */
  public WebrtcVideoCapturer(ICPInterface mICPInterface, boolean haveVideo) {
    this.mICPInterface = mICPInterface;
    this.haveVideo = haveVideo;
  }

  /** Creates an ICP-fed capturer with video enabled. */
  public WebrtcVideoCapturer(ICPInterface mICPInterface) {
    this.mICPInterface = mICPInterface;
    this.haveVideo = true;
  }

  /** Delivers the next file frame to the observer; no-op when not applicable. */
  public void tick() {
    // Guard: videoReader is only set by the file-based constructor, and
    // capturerObserver only after initialize(); the original NPE'd here.
    if (videoReader == null || capturerObserver == null) {
      return;
    }
    VideoFrame videoFrame = videoReader.getNextFrame();
    capturerObserver.onFrameCaptured(videoFrame);
    videoFrame.release();
  }

  private MediaCodec mediaCodec;
  private static final String MIME_TYPE = "video/avc"; // H.264

  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
                         CapturerObserver capturerObserver) {
    // Frames are pushed from tick() or MFramePusher; no surface capture is used.
    this.capturerObserver = capturerObserver;
  }

  /** Stops and releases the H.264 decoder, if one was started. Safe to call twice. */
  public void stopEncoder() {
    if (mediaCodec != null) {
      mediaCodec.stop();
      mediaCodec.release();
      mediaCodec = null;
    }
  }

  /** Creates and starts an H.264 decoder (1280x720, NV12 output). Currently unused. */
  private void startEncoder() {
    try {
      Log.d(TAG, "onGetNetVideoData: ");
      mediaCodec = MediaCodec.createDecoderByType(MIME_TYPE);
      MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, 1280, 720);
      // Expected input frame rate, in frames per second.
      mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
      mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatYUV420SemiPlanar);
      // No output surface: decoded buffers are drained via getOutputBuffer().
      mediaCodec.configure(mediaFormat, null, null, 0);
      mediaCodec.start();
    } catch (IOException e) {
      // Decoder creation failed; log with the cause instead of printStackTrace().
      Log.e(TAG, "创建解码失败", e);
    }
  }

  // Number of access units queued to the decoder so far.
  private int mCount;
  private boolean isPrintImage = true;

  /**
   * Feeds one encoded H.264 access unit to the decoder and drains at most one
   * decoded output buffer.
   *
   * @param buf    array holding the encoded data
   * @param offset start of the valid region in {@code buf}
   * @param length number of valid bytes
   * @param timeS  presentation timestamp from the producer
   *               (assumed to be microseconds — TODO confirm with the caller)
   */
  private void onFrame(byte[] buf, int offset, int length, long timeS) {
    // Non-blocking dequeue: returns -1 when every input buffer is in use.
    int inIndex = mediaCodec.dequeueInputBuffer(0);
    if (inIndex >= 0) {
      // getInputBuffer(index) matches the getOutputBuffer(index) usage below
      // and replaces the deprecated getInputBuffers() array.
      ByteBuffer byteBuffer = mediaCodec.getInputBuffer(inIndex);
      byteBuffer.clear();
      // Copy only the valid region; the original copied the whole array and
      // ignored the offset/length parameters.
      byteBuffer.put(buf, offset, length);
      // Queue with the caller-supplied timestamp (previously a bogus constant).
      mediaCodec.queueInputBuffer(inIndex, 0, length, timeS, 0);
      mCount++;
    } else {
      Log.i(TAG, "inIndex < 0");
      // No free input buffer; drop this access unit.
      return;
    }

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outIndex = mediaCodec.dequeueOutputBuffer(info, 0);
    if (outIndex >= 0) {
      if (isPrintImage) {
        // The codec-owned ByteBuffer cannot be used directly; copy the valid range.
        ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(outIndex);
        byteBuffer.position(info.offset);
        byteBuffer.limit(info.offset + info.size);

        byte[] ba = new byte[byteBuffer.remaining()];
        byteBuffer.get(ba);
        // NOTE(review): the decoded bytes are currently discarded; forwarding
        // them to capturerObserver was left disabled in the original code.
      }
      mediaCodec.releaseOutputBuffer(outIndex, false);
    } else {
      Log.e(TAG, "没有解码成功");
    }
  }

  MFramePusher mFramePusher;

  /** Receives NV21 frames from the ICP camera and forwards them to WebRTC. */
  public class MFramePusher extends FramePusher {
    public MFramePusher(int type) {
      super(type);
    }

    @Override
    public void onFrame(com.camera.base.inerface.VideoFrame farme) {
      // Only NV21 buffers are supported; other formats are silently ignored.
      if (farme.getBuffer().getBufferType() != VideoFrameBufferType.NV21_ONLY) {
        return;
      }
      if (capturerObserver == null) {
        // initialize() has not been called yet; drop the frame instead of NPE.
        return;
      }
      // Cast once instead of repeating the cast for every accessor.
      com.camera.base.inerface.NV21Buffer src =
          (com.camera.base.inerface.NV21Buffer) farme.getBuffer();
      // Wrap the raw NV21 data in a WebRTC buffer without copying.
      VideoFrame.Buffer frameBuffer =
          new NV21Buffer(src.getData(), src.getWidthD(), src.getHeightD(), null);
      final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
      capturerObserver.onFrameCaptured(new VideoFrame(frameBuffer, 0, captureTimeNs));
    }
  }

  boolean a = false; // Unused; kept so any external package-private access still compiles.

  /**
   * Debug helper: compresses an NV21 buffer to JPEG and writes it to
   * {@code filePath}. Failures are logged, not thrown.
   */
  public static void saveNV21ToImage(byte[] nv21, int width, int height, String filePath) {
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);

    // Compress the full image area.
    Rect rect = new Rect(0, 0, width, height);

    // try-with-resources replaces the original manual close() in finally.
    try (FileOutputStream fileOutputStream = new FileOutputStream(new File(filePath))) {
      // Quality 100: maximum JPEG quality.
      yuvImage.compressToJpeg(rect, 100, fileOutputStream);
    } catch (IOException e) {
      Log.e(TAG, "Failed to save NV21 image: " + filePath, e);
    }
  }

  @Override
  public void startCapture(int width, int height, int framerate) {
    // NOTE(review): file-playback scheduling (timer.schedule(tickTask, 0,
    // 1000 / framerate)) was disabled in the original; left disabled here to
    // preserve behavior — confirm whether file mode should actually tick.
    if (mICPInterface != null) {
      if (!mICPInterface.isICPCameraOpen()) {
        mICPInterface.openICPCamera();
      }
      // Subscribe to the camera's subcode stream; frames arrive via MFramePusher.
      mFramePusher = new MFramePusher(VideoFrameBufferType.NV21_ONLY);
      mICPInterface.addSubcodeStreamPusher(mFramePusher);
    }
  }

  @Override
  public void stopCapture() throws InterruptedException {
    if (mICPInterface != null && mICPInterface.isICPCameraOpen()) {
      mICPInterface.removeSubcodeStream(mFramePusher);
      // NOTE(review): the camera is intentionally left open (closeICPCamera()
      // was commented out) — presumably shared with other consumers; confirm.
    }
  }

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {
    // Empty on purpose.
  }

  @Override
  public void dispose() {
    // Release everything this capturer may own. Each call is safe when the
    // corresponding resource was never created (the original leaked all three).
    timer.cancel();
    if (videoReader != null) {
      videoReader.close();
    }
    stopEncoder();
  }

  @Override
  public boolean isScreencast() {
    return false;
  }
}
