package com.iraytek.rtsplib;

import android.content.Context;
import android.graphics.ImageFormat;
import android.media.MediaCodec;

import java.nio.ByteBuffer;

import pedro.encoder.Frame;
import pedro.encoder.video.FormatVideoEncoder;
import pedro.encoder.video.GetVideoData;
import pedro.encoder.video.VideoEncoder;
import pedro.rtsp.utils.ConnectCheckerRtsp;

/**
 * Encode manager that serves the encoded A/V streams over an embedded RTSP server
 * instead of pushing to a remote endpoint.
 *
 * <p>Acts as the bridge between the encoder callbacks inherited from
 * {@code BaseEncodeManager} and an {@code RtspServer} instance: audio config and
 * AAC frames are forwarded via {@link #prepareAudioRtp} / {@link #getAacDataRtp},
 * video config (SPS/PPS/VPS) and H.26x frames via {@link #onSpsPpsVpsRtp} /
 * {@link #getVideoDataRtp}. Raw camera frames arrive as NV21 byte arrays through
 * {@link #onDataChange} and are fed to the video encoder while streaming or recording.
 */
public class ServerEncodeManager extends BaseEncodeManager implements DataCallback<byte[]> {
    // Assigned once in the constructor and never replaced — keep it final.
    private final RtspServer server;

    /**
     * Creates the manager and its embedded RTSP server.
     *
     * @param context     Android context passed through to {@code BaseEncodeManager}
     * @param port        TCP port the RTSP server will listen on
     * @param checkerRtsp connection-state callback handed to the server
     */
    public ServerEncodeManager(Context context, int port, ConnectCheckerRtsp checkerRtsp) {
        super(context);
        server = new RtspServer(checkerRtsp, port);
        // Dynamical lets the encoder pick a supported YUV420 color format at runtime.
        setVideoEncodeFormat(FormatVideoEncoder.YUV420Dynamical);
    }

    /** Supplies the camera-specific video encoder used by the base class. */
    @Override
    protected VideoEncoder createVideoEncoder(GetVideoData getVideoData) {
        return new CameraVideoEncoder(getVideoData);
    }

    /** Forwards the prepared audio configuration to the RTSP server. */
    @Override
    protected void prepareAudioRtp(boolean isStereo, int sampleRate) {
        server.setStereo(isStereo);
        server.setSampleRate(sampleRate);
    }

    /** Sends one encoded AAC buffer to connected RTSP clients. */
    @Override
    protected void getAacDataRtp(ByteBuffer aacBuffer, MediaCodec.BufferInfo info) {
        server.sendAudio(aacBuffer, info);
    }

    /**
     * Receives the codec config (SPS/PPS, plus VPS for H.265) and starts the server.
     *
     * <p>NOTE(review): this callback can fire again if the codec is reconfigured,
     * which would call {@code startServer()} repeatedly — confirm RtspServer
     * tolerates a redundant start (it appears to be treated as idempotent here).
     */
    @Override
    protected void onSpsPpsVpsRtp(ByteBuffer sps, ByteBuffer pps, ByteBuffer vps) {
        server.setVideoInfo(sps, pps, vps);
        server.startServer();
    }

    /** Sends one encoded video buffer to connected RTSP clients. */
    @Override
    protected void getVideoDataRtp(ByteBuffer buffer, MediaCodec.BufferInfo info) {
        server.sendVideo(buffer, info);
    }

    /** Shuts the RTSP server down when streaming stops. */
    @Override
    protected void stopStreamRtp() {
        server.stopServer();
    }

    /** Returns the URL clients should use to connect, e.g. {@code rtsp://host:port}. */
    @Override
    public String getRtspAddress() {
        return "rtsp://" + server.getINetAddress();
    }

    /**
     * Receives a raw NV21 camera frame and feeds it to the encoder.
     * Frames are dropped unless a stream or recording is active.
     */
    @Override
    public void onDataChange(byte[] data) {
        if (isStreaming() || recording) {
            // orientation 0, no flip; format must match the camera's NV21 output.
            Frame frame = new Frame(data, 0, false, ImageFormat.NV21);
            videoEncoder.inputYUVData(frame);
        }
    }
}
