//
//  SampleHandler.m
//  screenRecorder
//
//  Created by lcj on 2020/11/9.
//  Copyright © 2020 apang.men. All rights reserved.
//


#import "SampleHandler.h"

// Private state for the broadcast extension's writer pipeline.
@interface SampleHandler ()
// YES until the first video frame is seen by -dropSampleBuffer:; used to seed
// the frame-drop timestamp baseline.
@property (nonatomic, assign) BOOL isFirstVideoFrame;
// YES once startSessionAtSourceTime: has been called for the current recording.
@property (nonatomic, assign) BOOL writing;
// Path of the App Group "videos" directory (lazily created).
@property (nonatomic, copy) NSString * fileFolder;
// Destination URL of the current .mp4 file (lazily built, timestamp-named).
@property (nonatomic, strong) NSURL * filePath;
// H.264 video input at native screen resolution.
@property (nonatomic, strong) AVAssetWriterInput * videoWriterInput;
// AAC mono input for microphone audio (RPSampleBufferTypeAudioMic).
@property (nonatomic, strong) AVAssetWriterInput * micAudioWriteInput;
// AAC mono input for in-app audio (RPSampleBufferTypeAudioApp).
@property (nonatomic, strong) AVAssetWriterInput * speakerAudioWriteInput;
// MP4 writer owning the three inputs above; rebuilt per recording.
@property (nonatomic, strong) AVAssetWriter * assetwriter;
// Timestamp of the last video frame kept by -dropSampleBuffer:.
@property (nonatomic, assign) CMTime previousPresentationTimeStamp;
@end

@implementation SampleHandler

/// Lazily resolves (and creates if missing) the shared App Group "videos"
/// directory used to hand finished recordings to the host app.
/// NOTE(review): the group identifier is hard-coded; it must match the App
/// Group entitlement of both the extension and the host app — confirm.
- (NSString *)fileFolder {
    if (_fileFolder == nil) {
        NSFileManager * fileManager = [NSFileManager defaultManager];
        NSURL * groupDictionary = [fileManager containerURLForSecurityApplicationGroupIdentifier:@"group.men.apang.Example"];
        _fileFolder = [groupDictionary URLByAppendingPathComponent:@"videos"].relativePath;
        if (![fileManager fileExistsAtPath:_fileFolder]) {
            NSError * error = nil;
            // Fix: check the BOOL result and only log when creation actually
            // failed — the old code logged "error:(null)" on every success.
            if (![fileManager createDirectoryAtPath:_fileFolder
                        withIntermediateDirectories:YES
                                         attributes:nil
                                              error:&error]) {
                NSLog(@"create folder error:%@",error);
            }
        }
    }
    return _fileFolder;
}

/// Lazily builds the destination URL for the current recording: a file named
/// with the current Unix timestamp (seconds) plus an ".mp4" extension, inside
/// the shared "videos" folder.
- (NSURL *)filePath {
    if (_filePath == nil) {
        NSTimeInterval now = [NSDate date].timeIntervalSince1970;
        NSString * fileName = [NSString stringWithFormat:@"%.0f", now];
        NSString * fullPath = [self.fileFolder stringByAppendingPathComponent:fileName];
        fullPath = [fullPath stringByAppendingPathExtension:@"mp4"];
        _filePath = [NSURL fileURLWithPath:fullPath];
        // Best-effort removal of any stale file at the same path; errors are
        // deliberately ignored (the file normally does not exist).
        [[NSFileManager defaultManager] removeItemAtURL:_filePath error:nil];
    }
    NSLog(@"filePath:%@",_filePath);
    return _filePath;
}

/// Lazily creates the H.264 video writer input sized to the device's native
/// pixel resolution, with an average bit rate of ~4 bits per pixel.
- (AVAssetWriterInput *)videoWriterInput {
    if (_videoWriterInput == nil) {
        /*
         Quality    Resolution    Suggested bit rate
           480p     720x480       1800Kbps
           720p     1280x720      3500kBps
          1080p     1920x1080     8500Kbps
         */
        // Points * scale = pixels; the encoder wants pixel dimensions.
        CGRect screenBounds = [UIScreen mainScreen].bounds;
        CGFloat screenScale = [UIScreen mainScreen].scale;
        NSInteger kWidth = screenBounds.size.width * screenScale;
        NSInteger kHeight = screenBounds.size.height * screenScale;
        NSLog(@"kWidth:%ld  kHeight:%ld",(long)kWidth, (long)kHeight);
        // Bits per pixel -> average bits per second for the encoder.
        CGFloat bitsPerPixel = 4.0;
        NSInteger bitsPerSecond = kWidth * kHeight * bitsPerPixel;
        // AVVideoCodecH264 was deprecated in favor of AVVideoCodecTypeH264
        // on iOS 11; both resolve to the same codec.
        NSString * codec = AVVideoCodecH264;
        if (@available(iOS 11.0, *)) {
            codec = AVVideoCodecTypeH264;
        }
        NSDictionary * settings = @{
            AVVideoCodecKey: codec,
            AVVideoWidthKey: @(kWidth),
            AVVideoHeightKey: @(kHeight),
            AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
            AVVideoCompressionPropertiesKey: @{
                // Average bit rate (bits per second).
                AVVideoAverageBitRateKey: @(bitsPerSecond),
                // A hint only — this does not cap the frame rate; it is used
                // together with AVVideoProfileLevelKey.
                AVVideoExpectedSourceFrameRateKey: @(15),
                AVVideoMaxKeyFrameIntervalKey: @(15),
                AVVideoProfileLevelKey: AVVideoProfileLevelH264BaselineAutoLevel
            }
        };
        _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                               outputSettings:settings];
        // Live capture source: the input must accept data in real time.
        _videoWriterInput.expectsMediaDataInRealTime = YES;
    }
    return _videoWriterInput;
}

/// Lazily creates the AAC writer input for microphone audio.
/// Mono, 22.05 kHz — matches the in-app (speaker) audio input.
- (AVAssetWriterInput *)micAudioWriteInput {
    if (_micAudioWriteInput == nil) {
        NSDictionary * settings = @{
            AVFormatIDKey: @(kAudioFormatMPEG4AAC),
            AVNumberOfChannelsKey: @(1),
            AVSampleRateKey: @(22050)
        };
        _micAudioWriteInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                 outputSettings:settings];
        // Live capture source: the input must accept data in real time.
        _micAudioWriteInput.expectsMediaDataInRealTime = YES;
    }
    return _micAudioWriteInput;
}

/// Lazily creates the AAC writer input for in-app (speaker) audio.
/// Mono, 22.05 kHz — matches the microphone audio input.
- (AVAssetWriterInput *)speakerAudioWriteInput {
    if (_speakerAudioWriteInput == nil) {
        NSDictionary * settings = @{
            AVFormatIDKey: @(kAudioFormatMPEG4AAC),
            AVNumberOfChannelsKey: @(1),
            AVSampleRateKey: @(22050)
        };
        _speakerAudioWriteInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                     outputSettings:settings];
        // Live capture source: the input must accept data in real time.
        _speakerAudioWriteInput.expectsMediaDataInRealTime = YES;
    }
    return _speakerAudioWriteInput;
}

/// Lazily builds the MP4 asset writer and attaches the video, mic, and
/// speaker inputs. Also resets the first-frame flag for the new session.
- (AVAssetWriter *)assetwriter {
    if (_assetwriter == nil) {
        _isFirstVideoFrame = YES;
        // Fix: capture and report the initialization error instead of
        // passing nil and silently returning a nil writer on failure.
        NSError * error = nil;
        _assetwriter = [[AVAssetWriter alloc] initWithURL:self.filePath
                                                 fileType:AVFileTypeMPEG4
                                                    error:&error];
        if (_assetwriter == nil) {
            NSLog(@"create asset writer error:%@", error);
            return nil;
        }
        if ([_assetwriter canAddInput:self.videoWriterInput]) {
            [_assetwriter addInput:self.videoWriterInput];
        }
        if ([_assetwriter canAddInput:self.micAudioWriteInput]) {
            [_assetwriter addInput:self.micAudioWriteInput];
        }
        if ([_assetwriter canAddInput:self.speakerAudioWriteInput]) {
            [_assetwriter addInput:self.speakerAudioWriteInput];
        }
    }
    return _assetwriter;
}

/// Frame-rate limiter: returns the buffer when it should be written, or NULL
/// when it arrived sooner than 1/frameRate seconds after the last kept frame.
/// Currently unused — the call site in -writeSampleBuffer:bufferType: is
/// commented out.
- (CMSampleBufferRef)dropSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CMSampleTimingInfo sampleTimingInfo = {0};
    // Fix: check the OSStatus; on failure pass the buffer through untouched
    // instead of comparing against uninitialized/zeroed timing values.
    OSStatus status = CMSampleBufferGetSampleTimingInfo(sampleBuffer, 0, &sampleTimingInfo);
    if (status != noErr) {
        return sampleBuffer;
    }
    if (_isFirstVideoFrame) {
        // Always keep the first frame and seed the timestamp baseline.
        _isFirstVideoFrame = NO;
        _previousPresentationTimeStamp = sampleTimingInfo.presentationTimeStamp;
        return sampleBuffer;
    }
    // Minimum spacing between kept frames, in seconds (renamed from the
    // misleading "bitRateSeconds").
    const NSInteger frameRate = 30;
    Float64 minFrameInterval = 1.0 / (1.0 * frameRate);
    CMTime subtract = CMTimeSubtract(sampleTimingInfo.presentationTimeStamp, _previousPresentationTimeStamp);
    NSLog(@"%f",CMTimeGetSeconds(subtract));
    if (CMTimeGetSeconds(subtract) >= minFrameInterval) {
        _previousPresentationTimeStamp = sampleTimingInfo.presentationTimeStamp;
        return sampleBuffer;
    }
    // Frame arrived too soon: caller should drop it.
    return NULL;
}

/// Routes one ReplayKit sample buffer to the matching writer input.
/// The session is anchored at the timestamp of the first buffer seen after
/// the writer has entered the writing state; until then buffers are dropped.
- (void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer bufferType:(RPSampleBufferType)bufferType API_AVAILABLE(ios(10.0)){
    NSLog(@"status:%ld  bufferType:%@",(long)self.assetwriter.status, (bufferType == 1)?@"视频":(bufferType == 2)?@"App内音频":(bufferType == 3)?@"mic":@"unknow");
    // Buffers can only be appended while the writer is in the writing state.
    if (self.assetwriter.status != AVAssetWriterStatusWriting) {
        return;
    }
    if (!self.writing) {
        // Anchor the session at the first delivered timestamp so the output
        // file does not begin with a long empty gap.
        CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        [self.assetwriter startSessionAtSourceTime:startTime];
        self.writing = YES;
    }
    // Fix: the old nested `if (self.writing == YES)` was always true here and
    // has been flattened; appendSampleBuffer: results are now checked instead
    // of being silently discarded.
    if (bufferType == RPSampleBufferTypeVideo) {
        // Frame dropping via -dropSampleBuffer: is intentionally disabled.
        if (self.videoWriterInput.readyForMoreMediaData) {
            if (![self.videoWriterInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"append video buffer error:%@", self.assetwriter.error);
            }
        }
    } else if (bufferType == RPSampleBufferTypeAudioApp) {
        if (self.speakerAudioWriteInput.readyForMoreMediaData) {
            if (![self.speakerAudioWriteInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"append app audio buffer error:%@", self.assetwriter.error);
            }
        }
    } else if (bufferType == RPSampleBufferTypeAudioMic) {
        if (self.micAudioWriteInput.readyForMoreMediaData) {
            if (![self.micAudioWriteInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"append mic audio buffer error:%@", self.assetwriter.error);
            }
        }
    }
}

/// Finalizes the current recording and clears all writer state so the next
/// recording recreates the pipeline lazily.
- (void)stopWrite {
    _writing = NO;
    _isFirstVideoFrame = NO;
    // Fix: -markAsFinished / -finishWritingWithCompletionHandler: raise
    // NSInternalInconsistencyException if the writer never started writing
    // (e.g. the broadcast ended before startWriting, or startWriting failed).
    // Only finalize when a session is actually in progress.
    if (_assetwriter != nil && _assetwriter.status == AVAssetWriterStatusWriting) {
        [self.videoWriterInput markAsFinished];
        [self.micAudioWriteInput markAsFinished];
        [self.speakerAudioWriteInput markAsFinished];
        // NOTE(review): the ivars are cleared below while finishing is still
        // in flight — presumably AVFoundation keeps the writer alive until the
        // completion handler runs, but confirm the file is always finalized.
        [self.assetwriter finishWritingWithCompletionHandler:^{
            ;
        }];
    }
    _assetwriter = nil;
    _videoWriterInput = nil;
    _micAudioWriteInput = nil;
    _speakerAudioWriteInput = nil;
    _filePath = nil;
}

/// ReplayKit callback: the user started the broadcast. Creates the writer
/// (via the lazy getter) and kicks off writing.
/// @param setupInfo Optional values supplied by the UI extension.
- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
    // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.
    NSLog(@"__%s__ :%@",__func__, setupInfo);
    // Checking the ivar (not the getter) ensures the lazily created writer is
    // only started once per recording.
    if (_assetwriter == nil) {
        // Fix: report a failed start instead of ignoring the BOOL result —
        // otherwise every subsequent sample buffer is silently dropped.
        if (![self.assetwriter startWriting]) {
            NSLog(@"startWriting error:%@", self.assetwriter.error);
        }
    }
}

/// ReplayKit callback: the user paused the broadcast; sample delivery stops.
/// No writer state is touched — the session resumes with the same writer.
- (void)broadcastPaused {
    // User has requested to pause the broadcast. Samples will stop being delivered.
    NSLog(@"__%s__",__func__);
}

/// ReplayKit callback: the user resumed the broadcast; sample delivery resumes.
/// NOTE(review): the pause gap is not compensated for in timestamps — the
/// output may contain a frozen stretch; verify if that matters.
- (void)broadcastResumed {
    // User has requested to resume the broadcast. Samples delivery will resume.
    NSLog(@"__%s__",__func__);
}

/// ReplayKit callback: the user ended the broadcast. Finalizes the MP4 and
/// tears down the writer pipeline via -stopWrite.
- (void)broadcastFinished {
    // User has requested to finish the broadcast.
    NSLog(@"__%s__",__func__);
    [self stopWrite];
}

/// ReplayKit callback invoked for every captured sample buffer (video, app
/// audio, mic audio). Thin wrapper that forwards to the writer routing logic.
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
   
    [self writeSampleBuffer:sampleBuffer bufferType:sampleBufferType];
}

@end
