//
//  KLRePlayTool.m
//  kularkDemo
//
//  Created by szblsx2 on 2020/3/24.
//  Copyright © 2020 szblsx. All rights reserved.
//

#import "KLRePlayTool.h"
#import <ReplayKit/ReplayKit.h>


@interface KLRePlayTool ()

// Output path of the recorded .mp4 under Library/replayKit.
// FIX: NSString properties should be `copy`, not `strong`, so a caller passing
// an NSMutableString cannot mutate the stored path behind our back.
@property (nonatomic, copy) NSString *videoPath;

// Muxes the captured sample buffers into the MP4 file.
@property (nonatomic, strong) AVAssetWriter *writer;

// Writer input for the screen video track.
@property (nonatomic, strong) AVAssetWriterInput *videoInput;

// Writer input for the app's internal audio track.
@property (nonatomic, strong) AVAssetWriterInput *appAudioInput;

// Writer input for the microphone audio track.
@property (nonatomic, strong) AVAssetWriterInput *micAudioInput;

// Set in -stopRecording:; the capture handler drops buffers once this is YES.
@property (nonatomic, assign) BOOL hasStopped;

@end

@implementation KLRePlayTool


/**开启录屏*/
- (void)startRecording:(void (^)(NSError * _Nonnull))callBack
{
    //开启麦克风录制
    [RPScreenRecorder sharedRecorder].microphoneEnabled = YES;
    
    if (@available(iOS 11.0, *)) {
        //创建临时存储路径
        _videoPath = [[self getTempDirectory] stringByAppendingFormat:@"/%@.mp4", @"KLReplayKit"];
        if ([[NSFileManager defaultManager] fileExistsAtPath:_videoPath]) {
            [[NSFileManager defaultManager] removeItemAtPath:_videoPath error:nil];
        }
        //创建写入
        NSError *error = nil;
        AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:_videoPath] fileType:AVFileTypeMPEG4 error:&error];
        if (error) {
            NSLog(@"AVAssetWriter error %@", error.localizedDescription);
        }
        _writer = writer;
        
        if ([writer canAddInput:self.videoInput]) {
            [writer addInput:self.videoInput];
        }else {
            NSLog(@"添加input失败 videoInput");
        }
        
        if ([writer canAddInput:self.micAudioInput]) {
            [writer addInput:self.micAudioInput];
        }else {
            NSLog(@"添加input失败 micAudioInput");
        }
        
        if ([writer canAddInput:self.appAudioInput]) {
            [writer addInput:self.appAudioInput];
        }else {
            NSLog(@"添加input失败 appAudioInput");
        }
        
        [writer startWriting];
     
        _hasStopped = NO;
        __block BOOL hasStartSession = NO;
        __weak typeof(self) weakSelf = self;
        [[RPScreenRecorder sharedRecorder] startCaptureWithHandler:^(CMSampleBufferRef  _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
            
            if (weakSelf.hasStopped) return ;
            
            if (!hasStartSession) {
                hasStartSession = YES;
                CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                [writer startSessionAtSourceTime:pts];
            }
            if (writer.status == AVAssetWriterStatusUnknown) {
                return;
            }
            
            if (writer.status == AVAssetWriterStatusFailed) {
                NSLog(@"An error occured: %@", writer.error);
                [self stopRecording:^(NSString * _Nonnull filePath, NSError * _Nonnull error) {}];
                return;
            }
            
            if (bufferType == RPSampleBufferTypeVideo) {
                CFRetain(sampleBuffer);
                if (weakSelf.videoInput.isReadyForMoreMediaData) {
                    NSLog(@"ready for weakSelf.videoInput");
                    // 将sampleBuffer添加进视频输入源
                    [weakSelf.videoInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                } else {
                    NSLog(@"Not ready for videoInput");
                }
            }
            else if (bufferType == RPSampleBufferTypeAudioApp) {
                
                if (weakSelf.appAudioInput.isReadyForMoreMediaData) {
                    NSLog(@"ready for RPSampleBufferTypeAudioApp");
                    CFRetain(sampleBuffer);
                    // 将sampleBuffer添加进音频输入源
                    [weakSelf.appAudioInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                } else {
                    NSLog(@"Not ready for _appAudioInput");
                }
            }
            else if (bufferType == RPSampleBufferTypeAudioMic) {
                if (weakSelf.micAudioInput.isReadyForMoreMediaData) {
                    NSLog(@"ready for RPSampleBufferTypeAudioMic");
                    CFRetain(sampleBuffer);
                    // 将sampleBuffer添加进视频输入源
                    [weakSelf.micAudioInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                } else {
                    NSLog(@"Not ready for _micAudioInput");
                }
            }
            
        } completionHandler:^(NSError * _Nullable error) {
            if(callBack) {
                callBack(error);
            }
            NSLog(@"completionHandler %@", error.localizedDescription);
        }];
    } else {
        
        //iOS11以下开启录屏方案
    }
    
}

/**停止录屏*/
- (void)stopRecording:(void(^)(NSString *filePath, NSError *error))callBack
{
    _hasStopped = YES;
    
    __weak typeof(self) weakSelf = self;
    if (@available(iOS 11.0, *)) {
        [[RPScreenRecorder sharedRecorder] stopCaptureWithHandler:^(NSError * _Nullable error) {
            if (error) {
                NSLog(@"stopCaptureWithHandler: %@", error);
            }
            // 结束写入
            [self.writer finishWritingWithCompletionHandler:^{
                weakSelf.writer = nil;
                weakSelf.videoInput = nil;
                weakSelf.appAudioInput = nil;
                weakSelf.micAudioInput = nil;
                NSLog(@"屏幕录制结束，视频地址: %@", weakSelf.videoPath);
                if (callBack) {
                    callBack(weakSelf.videoPath, nil);
                }
            }];
        }];
    } else {
        
        //iOS11以下停止录屏方案
    }
}


#pragma mark - private Method

// Shared audio encoder settings for both audio writer inputs:
// AAC, 16 kHz sample rate, mono.
- (NSDictionary *)audioSettingDict
{
    return @{
        AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
        AVSampleRateKey       : @16000,
        AVNumberOfChannelsKey : @1,
    };
}

// Returns the recording directory (Library/replayKit), creating it on first use.
// FIX: the directory-creation error was captured but silently discarded;
// it is now logged so a failed setup is visible.
- (NSString *)getTempDirectory
{
    NSString *replayDir = [[NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES) firstObject] stringByAppendingPathComponent:@"replayKit"];

    if (![[NSFileManager defaultManager] fileExistsAtPath:replayDir]) {
        NSError *error = nil;
        if (![[NSFileManager defaultManager] createDirectoryAtPath:replayDir withIntermediateDirectories:YES attributes:nil error:&error]) {
            NSLog(@"createDirectory replayKit failed: %@", error);
        }
    }

    return replayDir;
}

#pragma mark - Getter

// Lazily-created writer input for the screen video track (H.264, ~2 Mbps).
- (AVAssetWriterInput *)videoInput
{
    if (!_videoInput) {
        if (@available(iOS 11.0, *)) {
            // FIX: encode at pixel resolution, not point resolution. The
            // original used the screen's point size, which on Retina screens
            // produced a video at half (or a third of) the captured
            // resolution, and the writer downscaled every frame to it.
            CGFloat scale = [UIScreen mainScreen].scale;
            CGSize screenSize = [UIScreen mainScreen].bounds.size;

            NSDictionary *compressionProperties = @{
                AVVideoAverageBitRateKey : @(2000 * 1000)
            };
            NSDictionary *videoSettings = @{
                AVVideoCompressionPropertiesKey : compressionProperties,
                AVVideoCodecKey                 : AVVideoCodecTypeH264,
                AVVideoWidthKey                 : @(screenSize.width * scale),
                AVVideoHeightKey                : @(screenSize.height * scale)
            };

            _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
            // Fed live from the capture handler; must not stall waiting for data.
            _videoInput.expectsMediaDataInRealTime = YES;
        }
    }
    return _videoInput;
}

// Lazily-created writer input for the app's internal audio track.
- (AVAssetWriterInput *)appAudioInput
{
    if (_appAudioInput == nil) {
        AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                       outputSettings:[self audioSettingDict]];
        // Fed live from the capture handler; must not stall waiting for data.
        input.expectsMediaDataInRealTime = YES;
        _appAudioInput = input;
    }
    return _appAudioInput;
}

// Lazily-created writer input for the microphone audio track.
- (AVAssetWriterInput *)micAudioInput
{
    if (_micAudioInput == nil) {
        AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                       outputSettings:[self audioSettingDict]];
        // Fed live from the capture handler; must not stall waiting for data.
        input.expectsMediaDataInRealTime = YES;
        _micAudioInput = input;
    }
    return _micAudioInput;
}

@end
