

#include <assert.h>
#include <ctype.h>
#include <fcntl.h>
#include <inttypes.h>
#include <getopt.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>

#include <termios.h>
#include <unistd.h>

#define LOG_TAG "DisplayRecorder"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include <utils/Errors.h>
#include <utils/SystemClock.h>
#include <utils/Timers.h>
#include <utils/Trace.h>

#include <gui/ISurfaceComposer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <media/MediaCodecBuffer.h>
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/NdkMediaMuxer.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <mediadrm/ICrypto.h>
#include <ui/DisplayConfig.h>
#include <ui/DisplayState.h>

#include <utility>

#include "DisplayRecorder.h"

using android::ABuffer;
using android::ALooper;
using android::AMessage;
using android::AString;
using android::DisplayConfig;

using android::IBinder;
using android::IGraphicBufferProducer;
using android::ISurfaceComposer;
using android::MediaCodec;
using android::MediaCodecBuffer;

using android::PersistentSurface;
using android::PhysicalDisplayId;
using android::Rect;
using android::String8;
using android::SurfaceComposerClient;
using android::Vector;
using android::sp;
using android::status_t;

using android::INVALID_OPERATION;
using android::NAME_NOT_FOUND;
using android::NO_ERROR;
using android::UNKNOWN_ERROR;

namespace ui = android::ui;


// Static helper functions
// Round |num| down to the nearest even value (identity for even inputs).
static inline uint32_t floorToEven(uint32_t num) {
    return (num % 2 == 0) ? num : num - 1;
}
// End of static helper functions

// Construct a recorder targeting |displayId|, writing to |fileName| for at
// most |duration| seconds. The output file is created eagerly so a bad path
// fails early rather than at recording time.
DisplayRecorder::DisplayRecorder(PhysicalDisplayId displayId, std::string fileName,
                                 int duration) {
    this->displayId = displayId;
    this->filePath = std::move(fileName);
    this->duration = duration;
    // Bug fix: %lu expects an unsigned long; the cast was previously to
    // signed long, a format/argument signedness mismatch.
    ALOGD("Creating DisplayRecorder with displayId=%lu filePath=%s duration=%d", (unsigned long)displayId, this->filePath.c_str(), duration);
    ensureVideoFile(this->filePath);
}

// Construct a recorder targeting |displayId|, writing to |fileName| with the
// class default duration (gDefaultDurationSec).
DisplayRecorder::DisplayRecorder(PhysicalDisplayId displayId, std::string fileName) {
    this->displayId = displayId;
    this->filePath = std::move(fileName);
    // Fall back to the default duration (per the original note, 60 s).
    this->duration = this->gDefaultDurationSec;
    // Bug fix: %lu expects an unsigned long; the cast was previously to
    // signed long, a format/argument signedness mismatch.
    ALOGD("Creating DisplayRecorder with displayId=%lu filePath=%s duration=%d", (unsigned long)displayId, this->filePath.c_str(), duration);
    ensureVideoFile(this->filePath);
}

// Create the output file (if absent) so that an unwritable path is detected
// at construction time. Best-effort: failure is logged, not propagated.
void DisplayRecorder::ensureVideoFile(const std::string& fileName){
    int fd = open(fileName.c_str(), O_CREAT | O_RDWR, 0644);
    if (fd < 0) {
        ALOGE("Unable to open '%s'\n", fileName.c_str());
        // Bug fix: previously fell through and called close() on an
        // invalid descriptor.
        return;
    }
    close(fd);
}

// Configure the projection of virtual display |dpy| inside transaction |t|:
// capture the whole layer stack of the source display and scale it into the
// video frame, preserving aspect ratio (letterbox/pillarbox as needed).
status_t DisplayRecorder::setDisplayProjection(
        SurfaceComposerClient::Transaction& t,
        const sp<IBinder>& dpy,
        const ui::DisplayState& displayState){
    const ui::Size& viewport = displayState.viewport;
    // Source region: the entire layer stack of the display.
    Rect sourceRect(viewport);

    // Aspect ratio of the source display (height / width).
    const float aspect =
            viewport.getHeight() / static_cast<float>(viewport.getWidth());

    const uint32_t frameWidth = gVideoWidth;
    const uint32_t frameHeight = gVideoHeight;

    // Scale the image to the largest size that fits the video frame while
    // keeping the display's aspect ratio.
    uint32_t scaledWidth;
    uint32_t scaledHeight;
    if (frameHeight > (uint32_t)(frameWidth * aspect)) {
        // Width-limited: shrink the height.
        scaledWidth = frameWidth;
        scaledHeight = (uint32_t)(frameWidth * aspect);
    } else {
        // Height-limited: shrink the width.
        scaledHeight = frameHeight;
        scaledWidth = (uint32_t)(frameHeight / aspect);
    }

    // Center the scaled image inside the video frame.
    const uint32_t offX = (frameWidth - scaledWidth) / 2;
    const uint32_t offY = (frameHeight - scaledHeight) / 2;
    Rect destRect(offX, offY, offX + scaledWidth, offY + scaledHeight);

    t.setDisplayProjection(dpy, ui::ROTATION_0, sourceRect, destRect);
    return NO_ERROR;
}

// Create a virtual display that mirrors the physical display described by
// |displayState| into the encoder's input surface (|bufferProducer|). The
// handle to the new display is returned through |pDisplayHandle|.
status_t DisplayRecorder::prepareVirtualDisplay(
        const ui::DisplayState& displayState,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    sp<IBinder> virtualDpy =
            SurfaceComposerClient::createDisplay(String8("DisplayRecorder"),
                                                 false /*secure*/);

    // Wire the display to the encoder surface, set up the projection, and
    // mirror the same layer stack as the source display — atomically.
    SurfaceComposerClient::Transaction txn;
    txn.setDisplaySurface(virtualDpy, bufferProducer);
    setDisplayProjection(txn, virtualDpy, displayState);
    txn.setDisplayLayerStack(virtualDpy, displayState.layerStack);
    txn.apply();

    *pDisplayHandle = virtualDpy;
    return NO_ERROR;
}

// Create, configure, and start an AVC encoder fed through an input surface.
// |displayFps| is the display refresh rate; encoding runs at half that rate.
// On success, returns NO_ERROR and hands the codec and the input surface's
// buffer producer back through |pCodec| / |pBufferProducer|.
status_t DisplayRecorder::prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,sp<IGraphicBufferProducer>* pBufferProducer){
    status_t err;
    // Encode at half the display refresh rate. Bug fix: clamp to >= 1 so the
    // KEY_REPEAT_PREVIOUS_FRAME_AFTER computation below can never divide by
    // zero (displayFps < 2 previously truncated FRAME_RATE to 0).
    int frameRate = displayFps / 2;
    if (frameRate < 1) {
        frameRate = 1;
    }
    // Describe the encoded stream.
    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_WIDTH, gVideoWidth);
    format->setInt32(KEY_HEIGHT, gVideoHeight);
    format->setString(KEY_MIME, kMimeTypeAvc);
    format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
    format->setInt32(KEY_BIT_RATE, gBitRate);
    format->setFloat(KEY_FRAME_RATE, frameRate);
    format->setInt32(KEY_I_FRAME_INTERVAL, 1);
    //format->setInt32(KEY_MAX_B_FRAMES, 0);
    // Keep producing frames while the screen is static: repeat the previous
    // frame if nothing new arrives within one frame interval.
    format->setFloat(KEY_CAPTURE_RATE, frameRate);
    format->setInt64(KEY_REPEAT_PREVIOUS_FRAME_AFTER, 1000000 / frameRate);
    // Create the MediaCodec on its own looper thread.
    sp<android::ALooper> looper = new android::ALooper;
    looper->setName("screenrecord_looper");
    looper->start();
    ALOGD("Creating codec");
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
    if (codec == NULL) {
        ALOGE("ERROR: unable to create codec instance\n");
        return UNKNOWN_ERROR;
    }
    // Configure the codec as an encoder.
    err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        ALOGD("ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
        codec->release();
        return err;
    }

    ALOGD("Creating encoder input surface");
    // The virtual display will render into this surface.
    sp<IGraphicBufferProducer> bufferProducer;
    err = codec->createInputSurface(&bufferProducer);
    if (err != NO_ERROR) {
        // Bug fix: the message previously read "unable to %d encoder input
        // surface", splicing the error code into the sentence.
        ALOGE("ERROR: unable to create encoder input surface (err=%d)", err);
        codec->release();
        return err;
    }
    // Start encoding.
    ALOGD("Starting codec");
    err = codec->start();
    if (err != NO_ERROR) {
        ALOGE("ERROR: unable to start codec (err=%d)\n", err);
        codec->release();
        return err;
    }
    ALOGD("Codec prepared");
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    // Consistency: return NO_ERROR like the sibling prepare* methods
    // (previously a literal 0 — same value, clearer intent).
    return NO_ERROR;
}

// 视频录制的核心操作
status_t DisplayRecorder::runEncoder(const sp<MediaCodec>& encoder, AMediaMuxer *muxer){
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;
    ssize_t metaTrackIdx = -1;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(this->duration);

    bool firstFrame = true;

    Vector<sp<MediaCodecBuffer>> buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        ALOGE("Unable to get output buffers (err=%d)\n", err);
        return err;
    }
    if(onStart != nullptr){
        onStart(this->filePath.c_str());
    }
    // 缓存 Buffer，避免无数据时的写错误
    sp<ABuffer> cachedBuffer;
    int64_t lastWriteTime;
    int64_t lastPtsUsec;
    // 循环读取帧数据，然后写入文件
    while(!this->gStopRequested){
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (firstFrame) {
            ATRACE_NAME("first_frame");
            firstFrame = false;
        }

        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec){
            ALOGD("Recorder maximum duration reached!");
            this->gStopRequested = true;
            break;
        }
        // 获取输出的 Buffer 数据
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGD("dequeueOutputBuffer with flags=%d size=%zu ptsUsec=%ld and result=%d", flags, size, (long)ptsUsec, err);

        switch (err) {
            case NO_ERROR:
                // got a buffer
                if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                    ALOGV("Got codec config buffer (%zu bytes)", size);
                    if (muxer != NULL) {
                        // ignore this -- we passed the CSD into MediaMuxer when
                        // we got the format change notification
                        size = 0;
                    }
                }

                if (size != 0){
                    ALOGD("Got data in buffer %zu, size=%zu, pts=%ld",bufIndex, size, (long)ptsUsec);
                    if (ptsUsec == 0) {
                        ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                    }
                    ATRACE_NAME("write sample");
                    // 避免下次读取时无数据
                    cachedBuffer.clear();
                    cachedBuffer = ABuffer::CreateAsCopy(buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    lastWriteTime = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                    lastPtsUsec = ptsUsec;
                    sp<ABuffer> buffer = new ABuffer(
                            buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    AMediaCodecBufferInfo bufferInfo = {
                            0,
                            static_cast<int32_t>(buffer->size()),
                            ptsUsec,
                            flags
                    };
                    // MediaMuxer 写入数据
                    err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
                    ALOGD("writeSampleData %ld", (long)ptsUsec);
                    // 第二个视频写入时出错，err=-10005 AMEDIA_ERROR_INVALID_OPERATION
                    if (err != NO_ERROR) {
                        ALOGE("Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                }

                err = encoder->releaseOutputBuffer(bufIndex);
                if (err != NO_ERROR) {
                    ALOGE("Unable to release output buffer (err=%d)\n",err);
                    return err;
                }
                // 接收到最后一帧
                if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                    // Not expecting EOS from SurfaceFlinger.  Go with it.
                    ALOGD("Received end-of-stream");
                    this->gStopRequested = true;
                }
                break;
            case -EAGAIN:
                {
                    // 手动计算时间戳
                    int64_t currentPtsUsec = lastPtsUsec + systemTime(SYSTEM_TIME_MONOTONIC) / 1000 - lastWriteTime;
                    ALOGD("Got -EAGAIN,  currentPtsUsec= %ld", (long)currentPtsUsec);
                    lastPtsUsec = currentPtsUsec;
                    lastWriteTime = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                    if(cachedBuffer != nullptr){
                        // 使用上一帧的数据
                        AMediaCodecBufferInfo bufferInfo = {
                                0,
                                static_cast<int32_t>(cachedBuffer->size()),
                                currentPtsUsec,
                                flags
                        };
                        err = AMediaMuxer_writeSampleData(muxer, trackIdx, cachedBuffer->data(), &bufferInfo);
                    }
                    ALOGD("Got -EAGAIN, looping");
                }
                break;
            case android::INFO_FORMAT_CHANGED:
                {
                    // Format includes CSD, which we must provide to muxer.
                    ALOGD("Encoder format changed");
                    sp<AMessage> newFormat;
                    encoder->getOutputFormat(&newFormat);
                    // TODO remove when MediaCodec has been replaced with AMediaCodec
                    AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
                    if (muxer != NULL) {
                        trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
                        AMediaFormat *metaFormat = AMediaFormat_new();
                        AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
                        metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                        ALOGD("Encoder format changed, new metaTrackIdx");
                        AMediaFormat_delete(metaFormat);
                        ALOGD("Starting muxer");
                        err = AMediaMuxer_start(muxer);
                        if (err != NO_ERROR) {
                            ALOGE("Unable to start muxer (err=%d)\n", err);
                            return err;
                        }
                    }
                }
                break;
            case android::INFO_OUTPUT_BUFFERS_CHANGED:
                ALOGD("Encoder buffers changed");
                err = encoder->getOutputBuffers(&buffers);
                if (err != NO_ERROR) {
                    ALOGE("Unable to get new output buffers (err=%d)\n", err);
                    return err;
                }
                break;
            case INVALID_OPERATION:
                ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
                return err;
            default:
                ALOGW("Got weird result %d from dequeueOutputBuffer\n", err);
                return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", this->gStopRequested);
    cachedBuffer.clear();
    buffers.clear();

    return NO_ERROR;
}

// Record the physical display chosen at construction into an MP4 file at
// |filePath|, blocking until recording finishes or fails. Invokes the onFail
// callback with a negative step code on errors, and onFinish on completion.
status_t DisplayRecorder::start() {
    status_t err;

    // Resolve the physical display token.
    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(this->displayId);
    if (display == nullptr) {
        ALOGE("ERROR: no display\n");
        if(onFail != nullptr){
            onFail(-1);
        }
        return NAME_NOT_FOUND;
    }
    ui::DisplayState displayState;
    err = SurfaceComposerClient::getDisplayState(display, &displayState);
    if (err != NO_ERROR) {
        ALOGE("ERROR: unable to get display state\n");
        if(onFail != nullptr){
            onFail(-2);
        }
        return err;
    }
    DisplayConfig displayConfig;
    err = SurfaceComposerClient::getActiveDisplayConfig(display, &displayConfig);
    if (err != NO_ERROR) {
        ALOGE("ERROR: unable to get display config\n");
        if(onFail != nullptr){
            onFail(-3);
        }
        return err;
    }
    // Derive the video size from the display viewport when not preset;
    // dimensions are floored to even values for the AVC encoder.
    const ui::Size& viewport = displayState.viewport;
    ALOGD("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                viewport.getWidth(), viewport.getHeight(), displayConfig.refreshRate,
                toCString(displayState.orientation), displayState.layerStack);
    if (gVideoWidth == 0) {
        gVideoWidth = floorToEven(viewport.getWidth());
    }
    if (gVideoHeight == 0) {
        gVideoHeight = floorToEven(viewport.getHeight());
    }

    // Create and start the encoder plus its input surface.
    sp<MediaCodec> encoder;
    sp<IGraphicBufferProducer> encoderInputSurface;
    err = prepareEncoder(displayConfig.refreshRate, &encoder, &encoderInputSurface);

    if (err != NO_ERROR){
        ALOGE("ERROR: unable to prepareEncoder\n");
        if(onFail != nullptr){
            onFail(-4);
        }
        return err;
    }

    // Mirror the display into the encoder's input surface via a virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(displayState, encoderInputSurface, &dpy);
    if (err != NO_ERROR) {
        ALOGE("ERROR: unable to prepareVirtualDisplay\n");
        if (encoder != NULL) {
            encoder->release();
        }
        if(onFail != nullptr){
            onFail(-5);
        }
        return err;
    }

    AMediaMuxer *muxer = nullptr;
    const char * fileName = this->filePath.c_str();
    // Best-effort removal of any stale file (the file was pre-created by
    // ensureVideoFile); ENOENT just means there was nothing to remove.
    err  = unlink(fileName);
    if (err != 0 && errno != ENOENT) {
        ALOGE("ERROR: couldn't remove existing file\n");
        if(onFail != nullptr){
            onFail(-8);
        }
    }
    int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
    if (fd < 0) {
        ALOGE("ERROR: couldn't open file\n");
        if(onFail != nullptr){
            onFail(-9);
        }
        // Bug fix: execution previously fell through and handed the invalid
        // fd to AMediaMuxer_new. Tear down what we built and bail out.
        SurfaceComposerClient::destroyDisplay(dpy);
        if (encoder != NULL) {
            encoder->release();
        }
        return UNKNOWN_ERROR;
    }
    // Create the muxer. Per the NDK docs it is safe to close our fd as soon
    // as AMediaMuxer_new returns.
    muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
    close(fd);

    // Pump encoded frames into the muxer until stop()/duration/EOS.
    err = runEncoder(encoder, muxer);
    if (err != NO_ERROR) {
        ALOGE("Encoder failed (err=%d)\n", err);
        if(onFail != nullptr){
            onFail(-6);
        }
    }

    // Tear down in reverse order of creation.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (encoder != NULL) {
        encoder->stop();
    }
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        err = AMediaMuxer_stop(muxer);
        if (err != NO_ERROR) {
            ALOGE("MediaMuxer stop failed (err=%d)\n", err);
            if(onFail != nullptr){
                onFail(-7);
            }
        }
        err = AMediaMuxer_delete(muxer);
        if (err != NO_ERROR) {
            ALOGE("MediaMuxer delete failed (err=%d)\n", err);
        }
    }
    if (encoder != NULL) {
        encoder->release();
    }
    if(onFinish != nullptr){
        onFinish();
    }
    return err;
}

// Request an orderly shutdown: the loop in runEncoder() observes the flag on
// its next iteration and winds down.
status_t DisplayRecorder::stop() {
    gStopRequested = true;
    return 0;
}

// Install (or replace) the recording lifecycle callbacks: |start| fires when
// encoding begins, |finish| when start() completes, |fail| on errors.
// Each callable is moved into place.
void DisplayRecorder::setCallback(DisplayRecorder::OnStart start, DisplayRecorder::OnFinish finish, DisplayRecorder::OnFail fail) {
    onStart = std::move(start);
    onFinish = std::move(finish);
    onFail = std::move(fail);
}

// Free the recorder's owned state: drop the file path (and its heap storage)
// and detach every registered callback. Safe to call more than once.
void DisplayRecorder::release(){
    ALOGD("DisplayRecorder release start!");

    // Clear the path and ask the string to give back its capacity.
    filePath.clear();
    filePath.shrink_to_fit();

    // Detach all callbacks so no further notifications fire.
    onStart = nullptr;
    onFinish = nullptr;
    onFail = nullptr;

    ALOGD("DisplayRecorder release end！");
}

// Destructor: delegates cleanup to release() so that explicit-release and
// destruction paths share the same teardown logic.
DisplayRecorder::~DisplayRecorder(){
    release();
    ALOGD("DisplayRecorder finalize！");
}
