//
//  VideoDecodeViewController.m
//  FFMpegKitDemo
//
//  Created by ilongge on 2023/8/14.
//
#import <VideoToolbox/VideoToolbox.h>
#import "VideoDecodeViewController.h"

@interface VideoDecodeViewController ()
{
    // --- FFmpeg demux/decode state (set up in -initCodec:videotoolbox:) ---
    AVFormatContext *_formatContext;    // demuxer context for the opened input file
    AVCodecContext *_videoCodecContext; // decoder context for the video stream
    const struct AVCodec *_videoCodec;  // decoder matching the stream's codec id
    int _videoStreamIndex;              // index of the best video stream, -1 if none
    AVStream *_videoStream;             // the selected video stream
    AVFrame *_videoFrame;               // reusable frame that receives decoder output
    
    // --- YUV420P conversion (used when frames are software-decoded) ---
    struct SwsContext *_swsContext;     // converts decoded frames to AV_PIX_FMT_YUV420P
    AVFrame *_swsFrame;                 // destination frame for sws_scale_frame
    
    NSString *_videoFile;               // bundle resource name of the input video
    dispatch_queue_t _decode_queue;     // private serial queue all decoding runs on
    
    // --- progress bookkeeping read by the timer-driven UI refresh ---
    NSTimeInterval _timer_interval;     // refresh period of decodeTimer, in seconds
    long _origin_frame_count;           // video-stream packets read so far
    long _decode_start_time;            // decode start time, ms since the Unix epoch
    long _pts;                          // last decoded frame's pts + duration
}
// Console text view plus the statistics labels updated by -showDecodeSchedule.
@property (weak, nonatomic) IBOutlet UITextView *consoleText;
@property (weak, nonatomic) IBOutlet UIProgressView *progressView;
@property (weak, nonatomic) IBOutlet UILabel *frame_count;
@property (weak, nonatomic) IBOutlet UILabel *fps;
@property (weak, nonatomic) IBOutlet UILabel *time;
@property (weak, nonatomic) IBOutlet UILabel *speed;
// Repeating timer that periodically fires -showDecodeSchedule while decoding.
@property (nonatomic, strong) NSTimer *decodeTimer;
@end

@implementation VideoDecodeViewController

/// One-time setup: pick the demo asset, create the serial decode queue,
/// reset the progress UI, and open the demuxer/decoder immediately.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.title = @"视频解码";
    _videoFile = @"流浪地球_2_4K_HEVC.mp4";
    _decode_queue = dispatch_queue_create("com.ilongge.queue.VideoDecode", DISPATCH_QUEUE_SERIAL);
    _timer_interval = 0.2;
    self.progressView.progress = 0;
    NSString *resourcePath = [[NSBundle mainBundle] pathForResource:_videoFile ofType:nil];
    [self initCodec:resourcePath videotoolbox:NO];
}
/// Opens the media file at `filePath`, finds the best video stream, creates
/// and opens its decoder (optionally backed by the VideoToolbox hardware
/// device), and prepares the reusable frames plus the swscale context used
/// to convert decoded output to planar YUV420P.
///
/// @param filePath      Absolute path of the input file; nil is a no-op.
/// @param videotoolbox  YES to attach a VideoToolbox hardware device context.
- (void)initCodec:(NSString *)filePath videotoolbox:(BOOL)videotoolbox
{
    if (filePath == nil) {
        NSLog(@"file not exist");
        return;
    }
    /// Open the input and build the demuxer context.
    int ret = avformat_open_input(&_formatContext, filePath.UTF8String, NULL, NULL);
    if (ret != 0) {
        NSLog(@"avformat_open_input : %d", ret);
        return;
    }
    av_dump_format(_formatContext, 1, filePath.UTF8String, 0);
    {
        NSString *info_string = [FFMpegKitTool av_dump_format:_formatContext streamIndex:0 url:filePath is_output:NO];
        [self printMessage:info_string breakline:YES];
    }

    ret = avformat_find_stream_info(_formatContext, NULL);
    if (ret != 0) {
        NSLog(@"avformat_find_stream_info : %d", ret);
        return;
    }
    /// Locate the best video stream.
    _videoStreamIndex = av_find_best_stream(_formatContext, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (_videoStreamIndex < 0) {
        // FIX: log the actual failure code instead of the stale `ret`.
        NSLog(@"av_find_best_stream : %d", _videoStreamIndex);
        return;
    }
    _videoStream = _formatContext->streams[_videoStreamIndex];
    /// Find the decoder matching the stream's codec id.
    _videoCodec = avcodec_find_decoder(_videoStream->codecpar->codec_id);
    if (_videoCodec == NULL) {
        NSLog(@"avcodec_find_decoder: no decoder for codec id %d", _videoStream->codecpar->codec_id);
        return;
    }
    /// Allocate the decoder context.
    _videoCodecContext = avcodec_alloc_context3(_videoCodec);
    if (_videoCodecContext == NULL) {
        NSLog(@"avcodec_alloc_context3 failed");
        return;
    }
    /// Copy the stream parameters into the decoder context.
    ret = avcodec_parameters_to_context(_videoCodecContext, _videoStream->codecpar);
    if (ret < 0) {
        avcodec_free_context(&_videoCodecContext);
        NSLog(@"avcodec_parameters_to_context : %d", ret);
        return;
    }
    /// Optionally attach Apple's VideoToolbox hardware decoder device.
    if (videotoolbox) {
        // Resolve the device type by round-tripping through its name.
        const char *deviceName = av_hwdevice_get_type_name(AV_HWDEVICE_TYPE_VIDEOTOOLBOX);
        enum AVHWDeviceType device_type = av_hwdevice_find_type_by_name(deviceName);
        if (device_type != AV_HWDEVICE_TYPE_VIDEOTOOLBOX) {
            return;
        }
        // FIX: initialize to NULL — on failure av_hwdevice_ctx_create may
        // leave the pointer untouched, and the old code unref'd garbage.
        AVBufferRef *hw_device_ctx = NULL;
        ret = av_hwdevice_ctx_create(&hw_device_ctx, device_type, NULL, NULL, 0);
        if (ret < 0) {
            NSLog(@"av_hwdevice_ctx_create : %d", ret);
            return;
        }
        _videoCodecContext->hw_device_ctx = av_buffer_ref(hw_device_ctx);
        // FIX: drop our own reference — the old code kept it forever,
        // leaking one AVBufferRef per init. (Its follow-up `if (ret < 0)`
        // was also dead code: `ret` was already known to be >= 0 there.)
        av_buffer_unref(&hw_device_ctx);
        if (_videoCodecContext->hw_device_ctx == NULL) {
            return;
        }
    }
    /// Open the decoder.
    ret = avcodec_open2(_videoCodecContext, _videoCodec, NULL);
    if (ret != 0) {
        NSLog(@"avcodec_open2 : %d", ret);
        return;
    }
    /// Allocate the reusable frames and the YUV420P conversion context.
    _videoFrame = av_frame_alloc();
    _swsFrame = av_frame_alloc();
    _swsContext = sws_getContext(_videoCodecContext->width, _videoCodecContext->height, _videoCodecContext->pix_fmt,
                                 _videoCodecContext->width, _videoCodecContext->height, AV_PIX_FMT_YUV420P,
                                 SWS_BILINEAR, NULL, NULL, NULL);
    if (_swsContext == NULL) {
        NSLog(@"sws_getContext failed");
    }
}
/// Kicks off a full decode pass; the button is disabled until the pass
/// finishes so the serial decode queue cannot be double-booked.
- (IBAction)startDecodeAction:(UIButton *)sender {
    sender.userInteractionEnabled = NO;
    void (^reenableButton)(void) = ^{
        dispatch_async(dispatch_get_main_queue(), ^{
            sender.userInteractionEnabled = YES;
        });
    };
    [self videoDecode:reenableButton];
}

/// Decodes the entire video stream on the private serial queue, converts
/// every software-decoded frame to YUV420P and appends it to
/// Documents/<file>_Decode.YUV, then flushes the decoder, updates the UI
/// statistics, rewinds the demuxer, and finally invokes `complete`
/// (on the decode queue, matching the original behavior).
///
/// @param complete Optional block invoked once decoding has finished.
- (void)videoDecode:(void(^)(void))complete
{
    dispatch_async(_decode_queue, ^{
        NSString *pathString = [NSString stringWithFormat:@"%@/Documents/%@_Decode.YUV", NSHomeDirectory(), self->_videoFile];
        [[NSFileManager defaultManager] createFileAtPath:pathString contents:nil attributes:nil];
        NSFileHandle *file = [NSFileHandle fileHandleForWritingAtPath:pathString];

        [self startDecodeProgressTimer];
        [self printMessage:@"开始解码" breakline:YES];
        [self printMessage:@"" breakline:YES];
        self->_origin_frame_count = 0;
        self->_decode_start_time = (long)([[NSDate date] timeIntervalSince1970] * 1000);

        // FIX: av_packet_alloc() returns a zero-initialized packet. The old
        // code passed av_malloc(sizeof(AVPacket)) — uninitialized memory —
        // straight to av_read_frame, which is undefined behavior.
        AVPacket *packet = av_packet_alloc();
        while (av_read_frame(self->_formatContext, packet) == 0) {
            // Pool per packet: extractYUVData creates a large autoreleased
            // NSData for every frame; don't let them pile up for the whole file.
            @autoreleasepool {
                if (packet->stream_index == self->_videoStreamIndex) {
                    self->_origin_frame_count++;
                    if (avcodec_send_packet(self->_videoCodecContext, packet) >= 0) {
                        [self receiveFramesIntoFile:file verbose:YES];
                    }
                }
                av_packet_unref(packet);
            }
        }
        // FIX: flush with an explicit NULL packet (enter draining mode) instead
        // of re-sending the spent packet in a loop, and stop incrementing
        // _origin_frame_count for flush submissions — the old loop inflated
        // the reported frame count past the number of packets actually read.
        avcodec_send_packet(self->_videoCodecContext, NULL);
        [self receiveFramesIntoFile:file verbose:NO];
        av_packet_free(&packet);

        [self printMessage:@"解码完成" breakline:YES];
        [self printMessage:@"共" breakline:YES];
        [self printMessage:@(self->_origin_frame_count).description breakline:NO];
        [self printMessage:@"frames" breakline:NO];
        dispatch_async(dispatch_get_main_queue(), ^{
            // FIX: invalidate on the run loop the timer is installed on, and
            // clear the property — an invalidated NSTimer can never fire
            // again, so the old code broke every decode run after the first.
            [self.decodeTimer invalidate];
            self.decodeTimer = nil;
        });
        [self showDecodeSchedule];

        /// Rewind and reset the decoder so the button can be pressed again.
        av_seek_frame(self->_formatContext, self->_videoStreamIndex, 0, 0);
        avcodec_flush_buffers(self->_videoCodecContext);
        [file synchronizeFile];
        [file closeFile];
        if (complete) {
            complete();
        }
    });
}

/// Lazily creates the progress timer and schedules it on the MAIN run loop.
/// FIX: the old code added the timer to the decode-queue thread's run loop,
/// which GCD never runs, so the timer could not fire at all.
- (void)startDecodeProgressTimer
{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (self.decodeTimer == nil) {
            self.decodeTimer = [[NSTimer alloc] initWithFireDate:[NSDate distantFuture]
                                                        interval:self->_timer_interval
                                                          target:self
                                                        selector:@selector(showDecodeSchedule)
                                                        userInfo:nil
                                                         repeats:YES];
            [[NSRunLoop mainRunLoop] addTimer:self.decodeTimer forMode:NSRunLoopCommonModes];
        }
        [self.decodeTimer setFireDate:[NSDate distantPast]];
    });
}

/// Drains every frame currently available from the decoder, updates the
/// progress UI, and appends the YUV420P conversion of each software frame
/// to `file`. Stops on EAGAIN (needs more input), EOF (fully drained), or
/// any decode error.
///
/// @param file    Open handle the planar YUV data is appended to.
/// @param verbose YES to print a picture-type marker for each frame.
- (void)receiveFramesIntoFile:(NSFileHandle *)file verbose:(BOOL)verbose
{
    while (true) {
        int ret = avcodec_receive_frame(self->_videoCodecContext, self->_videoFrame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            break; // not an error: decoder wants more input / is drained
        }
        if (ret < 0) {
            av_frame_unref(self->_videoFrame);
            break;
        }
        if (verbose) {
            [self printMessage:[FFMpegKitTool av_picture_type:self->_videoFrame] breakline:NO];
            [self printMessage:@"." breakline:NO];
        }
        self->_pts = self->_videoFrame->pts + self->_videoFrame->pkt_duration;
        dispatch_async(dispatch_get_main_queue(), ^{
            self.progressView.progress = self->_origin_frame_count * 1.00 / self->_videoStream->nb_frames;
        });
        if (self->_videoFrame->format == AV_PIX_FMT_VIDEOTOOLBOX) {
            // Hardware frame: the CVPixelBufferRef lives in data[3].
            // It is not converted or written to disk here.
            NSLog(@"AV_PIX_FMT_VIDEOTOOLBOX");
        }
        else {
            sws_scale_frame(self->_swsContext, self->_swsFrame, self->_videoFrame);
            NSData *video_frame_data = [self extractYUVData:self->_swsFrame];
            [file seekToEndOfFile];
            [file writeData:video_frame_data];
            // NOTE: the per-frame synchronizeFile was dropped; the caller
            // syncs once when decoding finishes, which is much faster.
        }
        av_frame_unref(self->_videoFrame);
    }
}

/// Packs the three planes of a YUV420P frame into one planar NSData blob:
/// all Y bytes first, then all Cb, then all Cr. Chroma planes are half the
/// luma width and height (4:2:0 subsampling).
- (NSData *)extractYUVData:(AVFrame *)videoFrame
{
    int lumaWidth = videoFrame->width;
    int lumaHeight = videoFrame->height;
    int chromaWidth = lumaWidth / 2;
    int chromaHeight = lumaHeight / 2;

    NSMutableData *planarData = [NSMutableData data];
    [planarData appendData:[self dataFromVideoFrame:videoFrame->data[0]
                                           linesize:videoFrame->linesize[0]
                                              width:lumaWidth
                                             height:lumaHeight]];
    [planarData appendData:[self dataFromVideoFrame:videoFrame->data[1]
                                           linesize:videoFrame->linesize[1]
                                              width:chromaWidth
                                             height:chromaHeight]];
    [planarData appendData:[self dataFromVideoFrame:videoFrame->data[2]
                                           linesize:videoFrame->linesize[2]
                                              width:chromaWidth
                                             height:chromaHeight]];
    return planarData;
}
/// Copies one pixel plane out of an FFmpeg frame buffer into an NSData of
/// exactly width*height bytes.
///
/// FIX: FFmpeg pads each row to `linesize` bytes (linesize >= width, often
/// strictly greater for alignment). The old implementation copied
/// width*height *contiguous* bytes, so whenever linesize != width every row
/// after the first was skewed by the accumulated padding. Copy row by row,
/// skipping the padding at the end of each source row.
///
/// @param data     Base pointer of the plane (row-major, `linesize` stride).
/// @param linesize Stride in bytes between consecutive rows.
/// @param width    Number of meaningful bytes per row.
/// @param height   Number of rows.
/// @return Tightly-packed plane bytes; empty data for NULL/degenerate input.
- (NSData *)dataFromVideoFrame:(UInt8 *)data linesize:(int)linesize width:(int)width height:(int)height {
    if (data == NULL || width <= 0 || height <= 0) {
        return [NSData data];
    }
    // Defensive clamp retained from the original: never read past a row.
    int rowBytes = MIN(linesize, width);
    NSMutableData *plane = [NSMutableData dataWithCapacity:(NSUInteger)rowBytes * (NSUInteger)height];
    for (int row = 0; row < height; row++) {
        [plane appendBytes:data + (size_t)row * (size_t)linesize length:(NSUInteger)rowBytes];
    }
    return plane;
}

#pragma mark ------------------------- OTHER
- (void)printMessage:(NSString *)message breakline:(BOOL)breakline
{
    dispatch_async(dispatch_get_main_queue(), ^{
        NSMutableString *string = [[NSMutableString alloc] init];
        if (self.consoleText.text.length) {
            [string appendString:self.consoleText.text];
        }
        if (string.length && breakline){
            [string appendString:@"\n"];
            printf("\n");
        }
        else if (string.length && breakline == NO){
            [string appendString:@" "];
            printf(" ");
        }
        if (message.length) {
            [string appendString:message];
            printf("%s", message.UTF8String);
        }
        self.consoleText.text = string;
        CGFloat offset = self.consoleText.contentSize.height - self.consoleText.frame.size.height;
        BOOL needTrans = offset > self.consoleText.contentOffset.y;
        if (offset > 0 && needTrans) {
            [self.consoleText setContentOffset:CGPointMake(0, offset) animated:NO];
        }
    });
}
/// Refreshes the statistics labels: decoded frame count, decode fps,
/// elapsed media time, and decode speed relative to real time.
/// Fired periodically by decodeTimer and once when decoding completes.
- (void)showDecodeSchedule
{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (self->_videoStream == NULL || self->_videoCodecContext == NULL) {
            return; // codec setup failed; nothing meaningful to report
        }
        long now = (long)([[NSDate date] timeIntervalSince1970] * 1000);
        long elapsedMs = now - self->_decode_start_time;
        // FIX: both operands were `long`, so the old code performed integer
        // division and truncated fps before assigning to float; also guard
        // against elapsedMs == 0 on the very first tick.
        float fps = elapsedMs > 0 ? (float)((self->_origin_frame_count * 1000.0) / elapsedMs) : 0.0f;
        // Decode speed = decode fps / the stream's native frame rate.
        // FIX: the old code derived the rate from the codec context's
        // time_base, which is not the frame rate in modern FFmpeg.
        double nativeFps = av_q2d(self->_videoStream->avg_frame_rate);
        float speed = nativeFps > 0 ? (float)(fps / nativeFps) : 0.0f;
        self.frame_count.text = [NSString stringWithFormat:@"frame:%ld", self->_origin_frame_count];
        self.fps.text = [NSString stringWithFormat:@"fps:%.2f", fps];
        // FIX: decoded-frame pts values are expressed in the *stream* time
        // base, so convert with it (the codec time base gave wrong durations).
        double playedMs = self->_pts * av_q2d(self->_videoStream->time_base) * 1000;
        self.time.text = [FFMpegKitTool av_format_duration:playedMs];
        self.speed.text = [NSString stringWithFormat:@"%.2fx", speed];
    });
}

/// Non-memory cleanup: release every FFmpeg resource this controller owns.
/// FIX: the old implementation freed only the format and codec contexts,
/// leaking _videoFrame, _swsFrame, and _swsContext on every teardown.
- (void)dealloc
{
    if (_swsContext != NULL) {
        sws_freeContext(_swsContext);
        _swsContext = NULL;
    }
    if (_swsFrame != NULL) {
        av_frame_free(&_swsFrame);   // also nils the pointer
    }
    if (_videoFrame != NULL) {
        av_frame_free(&_videoFrame);
    }
    if (_videoCodecContext != NULL) {
        avcodec_free_context(&_videoCodecContext);
    }
    if (_formatContext != NULL) {
        avformat_close_input(&_formatContext);
    }
    printf("\n%s\n", __func__);
}

@end
