//
//  YWTakeVideoController2.m
//  YWPhotoEditMaster
//
//  Created by jun peng on 2023/9/19.
//

#import "YWTakeVideoController2.h"
#import <AVFoundation/AVFoundation.h>
#import <Photos/Photos.h>


@interface YWTakeVideoController2 ()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>

@property (weak, nonatomic) IBOutlet UIView *bgView; // container view for the camera preview layer
- (IBAction)takeVideoClick:(UIButton *)sender; // record/stop toggle button
@property (weak, nonatomic) IBOutlet UILabel *timeLabel; // elapsed-time readout
@property (nonatomic,strong)NSTimer *timer; // 1 Hz recording clock
@property(nonatomic,assign)NSInteger timeNum; // elapsed seconds shown in timeLabel
//- - - - - - -- --  -
@property(nonatomic,strong) AVCaptureDevice *device;// capture device: front/back camera (video input)
@property(nonatomic,strong) AVCaptureDevice *audioDevice;// capture device: microphone (audio input)
@property (strong,nonatomic) AVCaptureDeviceInput *input;// video input stream
@property (strong,nonatomic) AVCaptureDeviceInput *audioInput;// audio input stream
@property (strong,nonatomic) AVCaptureSession *session;// carries data between the inputs and the outputs
@property (strong,nonatomic) AVCaptureVideoPreviewLayer *previewLayer;// camera preview layer
//- - - - - - -- --  -
@property (nonatomic ,strong) AVCaptureAudioDataOutput *audioDataOutput;   // audio output source
@property (nonatomic ,strong) AVCaptureVideoDataOutput *videoDataOutput;   // video output source
//- - - - - - -- --  -
@property (nonatomic ,strong) AVCaptureConnection *connection; // NOTE(review): declared but never used in this file
@property (nonatomic ,strong) AVAssetWriter *writer;// asset writer producing the recording file
@property (nonatomic ,strong) AVAssetWriterInput *writerAudioInput;// writer-side audio input
@property (nonatomic ,strong) AVAssetWriterInput *writerVideoInput;// writer-side video input

@property(nonatomic,strong)UIImageView *focusView; // tap-to-focus indicator

@property(nonatomic,strong)NSURL *preVideoURL; // output file URL in the sandbox; must be unique per recording
@property(nonatomic,assign)BOOL canWritting; // gate: writer session started, sample buffers may be appended

@end

@implementation YWTakeVideoController2


#warning - 用AVAssetWriter来拍视频（失败了）


#pragma mark - 这个拍视频VC用的是AVCaptureVideoDataOutput、AVCaptureAudioDataOutput


//AVAssetWriter不支持暂停录制，尝试过暂停文件写入，结果为空白段，且音频时间顺序混乱， 状态枚举无暂停状态，不支持

/*
 两种录制方式对比

 相同点：数据采集都在AVCaptureSession中进行，视频和音频的输入都一样，画面的预览一致。
 不同点：

     1.AVCaptureMovieFileOutput较为简便，只需要一个输出即可；
     AVAssetWriter 需要 AVCaptureVideoDataOutput 和 AVCaptureAudioDataOutput 两个单独的输出，拿到各自的输出数据后，然后自己进行相应的处理
     2.AVAssetWriter可以配置更多的参数，更为灵活
     3.文件处理不一致， AVAssetWriter可以拿到实时数据流
     AVCaptureMovieFileOutput，如果要剪裁视频，因为系统已经把数据写到文件中了，我们需要从文件中读到一个完整的视频，然后处理；
     而AVAssetWriter我们拿到的是数据流，还没有合成视频，对数据流进行处理
 */





// Lazy tap-to-focus indicator. Created on first access, added to the preview
// container, and kept hidden until a tap gesture shows it.
-(UIImageView *)focusView{
    if (_focusView != nil) {
        return _focusView;
    }
    UIImageView *indicator = [[UIImageView alloc] init];
    indicator.image = [UIImage imageNamed:@"聚焦"];
    [self.bgView addSubview:indicator];
    indicator.bounds = CGRectMake(0, 0, 80, 80);
    indicator.hidden = YES;
    _focusView = indicator;
    return _focusView;
}


// Lazy default video capture device (system default camera).
-(AVCaptureDevice *)device{
    if (_device == nil) {
        _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    return _device;
}

// Lazy default audio capture device (microphone).
-(AVCaptureDevice *)audioDevice{
    if (_audioDevice == nil) {
        _audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    }
    return _audioDevice;
}


// Lazy video device input wrapping `device`.
// NOTE(review): the creation error is silently dropped; the property simply
// stays nil on failure (matches original behavior).
-(AVCaptureDeviceInput *)input{
    if (_input == nil) {
        NSError *creationError = nil;
        _input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&creationError];
    }
    return _input;
}

// Lazy audio device input wrapping `audioDevice`.
// NOTE(review): the creation error is silently dropped; the property simply
// stays nil on failure (matches original behavior).
-(AVCaptureDeviceInput *)audioInput{
    if (_audioInput == nil) {
        NSError *creationError = nil;
        _audioInput = [AVCaptureDeviceInput deviceInputWithDevice:self.audioDevice error:&creationError];
    }
    return _audioInput;
}



// Lazy audio data output; its sample-buffer delegate is attached later in
// -beginTakeVideo.
-(AVCaptureAudioDataOutput *)audioDataOutput{
    if (_audioDataOutput == nil) {
        _audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
    }
    return _audioDataOutput;
}
// Lazy video data output.
// NOTE(review): `connectionWithMediaType:` returns nil here, because at first
// access the output has not been added to a session yet — connections only
// exist after -[AVCaptureSession addOutput:]. The orientation assignment
// below is therefore a no-op; it must be applied after the output joins the
// session (e.g. in -startScan) to actually take effect.
-(AVCaptureVideoDataOutput *)videoDataOutput{ // video output source
    if(_videoDataOutput == nil){
        AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        _videoDataOutput = videoDataOutput;
        
        
        // Intended to lock the captured video orientation to portrait.
           AVCaptureConnection *imageConnection = [videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
           if (imageConnection.supportsVideoOrientation) {
               imageConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
           }
        
    }
    return _videoDataOutput;
}

// Lazy capture session; prefers the High preset and falls back to 1280x720.
-(AVCaptureSession *)session{
    if (_session == nil) {
        AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
        _session = captureSession;
        // The output resolution can be tuned via the session preset.
        if ([captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
            captureSession.sessionPreset = AVCaptureSessionPresetHigh;
        } else if ([captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
            captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
        }
    }
    return _session;
}

// Lazy preview layer bound to the capture session: aspect-fill gravity and a
// portrait display orientation.
-(AVCaptureVideoPreviewLayer *)previewLayer{
    if (_previewLayer == nil) {
        AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        layer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        _previewLayer = layer;
    }
    return _previewLayer;
}


- (void)viewDidLoad {
    [super viewDidLoad];

    // Navigation bar: cancel on the left, camera-flip on the right.
    self.navigationItem.leftBarButtonItem = [[UIBarButtonItem alloc] initWithTitle:@"取消" style:UIBarButtonItemStylePlain target:self action:@selector(cancelClick)];

    UIButton *toggleButton = [[UIButton alloc] init];
    [toggleButton setBackgroundImage:[UIImage imageNamed:@"摄像头反转"] forState:UIControlStateNormal];
    toggleButton.size = CGSizeMake(30, 30);
    [toggleButton addTarget:self action:@selector(toggleCamera) forControlEvents:UIControlEventTouchUpInside];
    self.navigationItem.rightBarButtonItem = [[UIBarButtonItem alloc] initWithCustomView:toggleButton];

    // Recording clock starts hidden at zero.
    self.timeLabel.hidden = YES;
    self.timeNum = 0;

    // Request camera permission.
    [self prepareTakePhotoAuthorization];

    // Wire up and start the capture session.
    [self startScan];

    // Tap-to-focus on the preview area.
    UITapGestureRecognizer *tapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(focusGesture:)];
    [self.bgView addGestureRecognizer:tapRecognizer];
}


// Tap-to-focus handler: shows the focus indicator at the tap point and aims
// the camera's focus/exposure point of interest at the equivalent device
// coordinate.
- (void)focusGesture:(UITapGestureRecognizer*)gesture{
    CGPoint point = [gesture locationInView:gesture.view];
    [self setFocusCursorWithPoint:point];
    CGSize size = self.bgView.bounds.size;
    // focusPoint
    // The "point of interest" coordinate space treats the phone as rotated
    // left into landscape with (0,0) at that orientation's top-left corner —
    // hence the axis swap and flip below. (Any API named "…Interest" uses
    // this landscape space.)
    CGPoint focusPoint = CGPointMake( point.y /size.height ,1-point.x/size.width);
    if ([self.device lockForConfiguration:nil]) {
        [self.session beginConfiguration];
        /***** The point of interest must be set BEFORE the mode ******/
        // Focus point
        if ([self.device isFocusPointOfInterestSupported]) {
            [self.device setFocusPointOfInterest:focusPoint];
        }
        // Focus mode
        if ([self.device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
            [self.device setFocusMode:AVCaptureFocusModeAutoFocus];
        }else{
            NSLog(@"聚焦模式修改失败");
        }
        // Exposure point
        if ([self.device isExposurePointOfInterestSupported]) {
            [self.device setExposurePointOfInterest:focusPoint];
        }
        // Exposure mode
        if ([self.device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
            [self.device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
        } else {
            NSLog(@"曝光模式修改失败");
        }
        [self.device unlockForConfiguration];
        [self.session commitConfiguration];
    }
}

// Plays the tap-to-focus animation: moves the indicator to `point`, pops it
// to 1.25x, settles back to identity, then hides it.
-(void)setFocusCursorWithPoint:(CGPoint)point{
    self.focusView.center = point;
    self.focusView.hidden = NO;

    [UIView animateWithDuration:0.3 animations:^{
        self.focusView.transform = CGAffineTransformMakeScale(1.25, 1.25);
    } completion:^(BOOL finished) {
        [UIView animateWithDuration:0.5 animations:^{
            self.focusView.transform = CGAffineTransformIdentity;
        } completion:^(BOOL done) {
            self.focusView.hidden = YES;
        }];
    }];
}




// Keep the preview layer sized to the preview container after every layout
// pass (layers do not autoresize with their host view).
-(void)viewDidLayoutSubviews{
    [super viewDidLayoutSubviews];
    self.previewLayer.frame = self.bgView.bounds;
}

//开启拍照权限
// Requests camera permission when it has not been granted yet.
// The completion handler is delivered on an arbitrary queue, so hop back to
// the main queue ASYNCHRONOUSLY. The original used dispatch_sync, which
// deadlocks if the handler is ever invoked on the main queue.
-(void)prepareTakePhotoAuthorization{
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];

    if (authStatus != AVAuthorizationStatusAuthorized) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) {
                    // Permission granted.
                } else {
                    // Permission denied; capture will produce no frames.
                }
            });
        }];
    }
}

// Wires the camera/microphone inputs and the data outputs into the capture
// session, locks the video connection to portrait, attaches the preview
// layer, and starts the session.
-(void)startScan{
    // Not every input/output can be added to a session — verify all four
    // before mutating the session at all.
    if (!([self.session canAddInput:self.input] &&
          [self.session canAddInput:self.audioInput] &&
          [self.session canAddOutput:self.audioDataOutput] &&
          [self.session canAddOutput:self.videoDataOutput])) {
        return;
    }

    [self.session beginConfiguration];
    [self.session addInput:self.input];
    [self.session addInput:self.audioInput];
    [self.session addOutput:self.audioDataOutput];
    [self.session addOutput:self.videoDataOutput];

    // The output's video connection only exists once the output has joined
    // the session, so the portrait orientation must be applied HERE (setting
    // it inside the videoDataOutput getter is a no-op: the connection is
    // still nil at that point).
    AVCaptureConnection *videoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (videoConnection.supportsVideoOrientation) {
        videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }
    [self.session commitConfiguration];

    [self.bgView.layer addSublayer:self.previewLayer];

    [self.session startRunning];
}


// Stop the capture session when the controller is destroyed.
- (void)dealloc{
    [self.session stopRunning];
}

// Record button toggled. Selected: show and start the clock, begin writing.
// Deselected: stop the clock, reset the readout to 0, finish the recording.
- (IBAction)takeVideoClick:(UIButton *)sender{
    sender.selected = !sender.selected;

    if (sender.selected) {
        self.timeLabel.hidden = NO;
        // Only create a timer when one is not already running.
        if (self.timer == nil) {
            [self addTimer];
        }
        // Start recording.
        [self beginTakeVideo];
        return;
    }

    [self removeTimer];
    self.timeLabel.hidden = YES;
    self.timeNum = 0;
    self.timeLabel.text = [YWTimeTool getFormatTime:self.timeNum];
    // Finish recording.
    [self finishTakeVideo];
}


//添加定时器
// Starts the 1 Hz recording clock.
// Fixes two issues with the original:
//  1. scheduledTimerWithTimeInterval: already registers the timer on the run
//     loop's default mode, and the original then re-added it to common modes
//     — the timer was registered twice. Create an unscheduled timer and add
//     it exactly once, in NSRunLoopCommonModes so it keeps firing while the
//     user is scrolling/tracking.
//  2. The selector/target API retains its target, so a running timer kept
//     this controller alive (dealloc, and thus stopRunning, never fired if
//     the screen was dismissed mid-recording). The block API with a weak
//     reference breaks that retain cycle.
-(void)addTimer{
    __weak typeof(self) weakSelf = self;
    NSTimer *timer = [NSTimer timerWithTimeInterval:1 repeats:YES block:^(NSTimer * _Nonnull t) {
        [weakSelf addOneTime];
    }];
    [[NSRunLoop mainRunLoop] addTimer:timer forMode:NSRunLoopCommonModes];
    self.timer = timer;
}

//移除定时器
// Stops and releases the recording clock.
-(void)removeTimer{
    [self.timer invalidate];
    self.timer = nil;
}

// Timer tick: advance the elapsed-seconds counter and refresh the label.
-(void)addOneTime{
    self.timeNum += 1;
    self.timeLabel.text = [YWTimeTool getFormatTime:self.timeNum];
}


// Left bar button action: dismiss this controller.
-(void)cancelClick{
    [self dismissViewControllerAnimated:YES completion:nil];
}

//切换摄像头
// Flips between the front and back cameras behind a flip animation.
- (void)toggleCamera{
    [UIView transitionWithView:self.bgView duration:0.5 options:UIViewAnimationOptionTransitionFlipFromLeft animations:^{
        // Pick the camera opposite the one currently feeding the session.
        AVCaptureDevicePosition position = self.input.device.position;
        AVCaptureDevice *newVideoDevice = nil;
        if (position == AVCaptureDevicePositionBack) {
            newVideoDevice = [self videoDeviceWithPosition:AVCaptureDevicePositionFront];
        } else {
            newVideoDevice = [self videoDeviceWithPosition:AVCaptureDevicePositionBack];
        }

        NSError *error = nil;
        AVCaptureDeviceInput *newVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:newVideoDevice error:&error];
        // Check the returned object, not the error pointer: Cocoa only
        // guarantees *error on failure, and failure is signalled by a nil
        // return. (The original tested `if (error)`.)
        if (newVideoInput == nil) {
            NSLog(@"toggleCamera: failed to create input: %@", error);
            return;
        }

        // Swap the inputs atomically inside a configuration transaction.
        [self.session beginConfiguration];
        [self.session removeInput:self.input];
        if ([self.session canAddInput:newVideoInput]) {
            [self.session addInput:newVideoInput];
            self.device = newVideoDevice;
            self.input = newVideoInput;
        } else {
            // Could not switch — restore the previous input.
            [self.session addInput:self.input];
        }
        [self.session commitConfiguration];

    } completion:^(BOOL finished) {

    }];

}


#pragma mark - 私有方法/***  取得指定位置的摄像头**  @param position 摄像头位置**  @return 摄像头设备*/
/// Returns the built-in wide-angle camera at the given position.
/// @param position AVCaptureDevicePositionFront or ...Back.
/// @return The matching device, or nil when no such camera exists.
- (AVCaptureDevice *)videoDeviceWithPosition:(AVCaptureDevicePosition)position {
    // Let the discovery session filter by position itself instead of
    // enumerating every device and comparing positions by hand.
    AVCaptureDeviceDiscoverySession *discoverySession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    return discoverySession.devices.firstObject;
}



//---- ------ - -- ----- - -- - ---- ----- - -- - --

//视频录制方式一 --- 通过AVAssetWriter写入
//视频录制需要在沙盒中先生成一个路径，用于存储视频录制过程中的文件信息写入，等视频资料全部写入完成后，即可获取到完整的视频

//生成路径
// Builds a unique, timestamped .mp4 file URL under Documents/shortVideo,
// creating the directory if it does not exist. The path must be unique per
// recording because AVAssetWriter will not overwrite an existing file.
- (NSURL *)createVideoFilePathUrl
{
    NSString *documentPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/shortVideo"];

    // Second-resolution timestamp keeps names unique across recordings.
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyyMMddHHmmss"];
    NSString *destDateString = [dateFormatter stringFromDate:[NSDate date]];
    NSString *videoName = [destDateString stringByAppendingString:@".mp4"];

    NSString *filePath = [documentPath stringByAppendingPathComponent:videoName];

    // Ensure the target directory exists. The original ignored both the
    // `isDirectory` answer (a plain FILE at this path would have slipped
    // through) and the creation error.
    NSFileManager *manager = [NSFileManager defaultManager];
    BOOL isDir = NO;
    if (![manager fileExistsAtPath:documentPath isDirectory:&isDir] || !isDir) {
        NSError *dirError = nil;
        if (![manager createDirectoryAtPath:documentPath withIntermediateDirectories:YES attributes:nil error:&dirError]) {
            NSLog(@"createVideoFilePathUrl: failed to create directory: %@", dirError);
        }
    }

    return [NSURL fileURLWithPath:filePath];
}

//开始录制, 完成录制配置的设置
// Prepares a fresh AVAssetWriter recording: creates the output URL, builds
// the writer and its video/audio inputs, and attaches the sample-buffer
// delegates so the capture callback starts receiving data.
-(void)beginTakeVideo{
    // 2.1 Unique sandbox path for this recording.
    self.preVideoURL = [self createVideoFilePathUrl];

    // A new recording must start the writer from scratch. Without this reset
    // the SECOND recording never calls -startWriting (the gate stayed YES
    // after the first one finished) and every append is silently dropped.
    self.canWritting = NO;

    // 2.2 Create the writer for the target file.
    // (The original also spun up an empty dispatch_async here — dead code,
    // removed.)
    NSError *error = nil;
    self.writer = [AVAssetWriter assetWriterWithURL:self.preVideoURL fileType:AVFileTypeMPEG4 error:&error];
    if (self.writer == nil) {
        NSLog(@"beginTakeVideo: failed to create writer: %@", error);
        return;
    }

    // 2.3 Video writer input: H.264 sized to the preview, ~12 bits/pixel.
    CGFloat width = self.bgView.width;
    CGFloat height = self.bgView.height;
    NSInteger numPixels = width * height;
    // Bits per pixel.
    CGFloat bitsPerPixel = 12.0;
    NSInteger bitsPerSecond = numPixels * bitsPerPixel;
    // Bit-rate and frame-rate settings.
    NSDictionary *compressionProperties = @{ AVVideoAverageBitRateKey : @(bitsPerSecond),
                                             AVVideoExpectedSourceFrameRateKey : @(30),
                                             AVVideoMaxKeyFrameIntervalKey : @(30),
                                             AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel };
    // Video properties.
    NSDictionary *videoSetting = @{ AVVideoCodecKey : AVVideoCodecTypeH264,
                                    AVVideoWidthKey : @(width),
                                    AVVideoHeightKey : @(height),
                                    AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
                                    AVVideoCompressionPropertiesKey : compressionProperties };
    self.writerVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSetting];
    // Must be YES when feeding the writer live from a capture session.
    self.writerVideoInput.expectsMediaDataInRealTime = YES;
    if ([self.writer canAddInput:self.writerVideoInput]) {
        [self.writer addInput:self.writerVideoInput];
    }

    // 2.4 Audio writer input: mono AAC @ 22.05 kHz.
    NSDictionary *audioSetting = @{ AVEncoderBitRatePerChannelKey : @(28000),
                                    AVFormatIDKey : @(kAudioFormatMPEG4AAC),
                                    AVNumberOfChannelsKey : @(1),
                                    AVSampleRateKey : @(22050) };
    self.writerAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSetting];
    self.writerAudioInput.expectsMediaDataInRealTime = YES;
    if ([self.writer canAddInput:self.writerAudioInput]) {
        [self.writer addInput:self.writerAudioInput];
    }

    // -startSessionAtSourceTime: is deferred until the FIRST video buffer
    // arrives (see the capture delegate), so the file never begins with
    // audio-only frames.

    // 2.5 Capture-side output configuration.
    NSDictionary *video_Setting = @{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
    self.videoDataOutput.videoSettings = video_Setting;
    self.videoDataOutput.alwaysDiscardsLateVideoFrames = YES; // drop late frames instead of buffering them

    // AVCapture*DataOutput requires a SERIAL delegate queue so sample buffers
    // arrive in presentation order; the original's concurrent queues could
    // deliver frames out of order to the writer.
    dispatch_queue_t videoQueue = dispatch_queue_create("com.yw.takevideo.video", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:videoQueue];

    dispatch_queue_t audioQueue = dispatch_queue_create("com.yw.takevideo.audio", DISPATCH_QUEUE_SERIAL);
    [self.audioDataOutput setSampleBufferDelegate:self queue:audioQueue];
}

//音频视频的输出的代理方法都是走这个代理方法
// Shared delegate callback for BOTH the video and the audio data outputs
// (each delivers on its own queue).
// Writing is gated on the first VIDEO buffer: only then is the writer started
// and its session timestamped, which avoids a leading audio-only (blank)
// segment in the output file.
// NOTE(review): `canWritting` is read and written from two different
// delegate queues with no synchronization — confirm this is acceptable.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    
    // 3. File writing. Anchoring the session at the first buffer's timestamp
    // avoids blank video at the start of the file.
    
    // startSessionAtSourceTime:
    // On the first received buffer, start the writer; every subsequent buffer
    // is appended to the file.
    
    CMFormatDescriptionRef desMedia = CMSampleBufferGetFormatDescription(sampleBuffer);
    CMMediaType mediaType = CMFormatDescriptionGetMediaType(desMedia);
    if (mediaType == kCMMediaType_Video) {
        if (!self.canWritting) {
            [self.writer startWriting];
            CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            self.canWritting = YES;
            [self.writer startSessionAtSourceTime:timestamp];
        }
    }
    
    if (self.canWritting) {
        if (mediaType == kCMMediaType_Video) {
            if (self.writerVideoInput.readyForMoreMediaData) {
                BOOL success = [self.writerVideoInput appendSampleBuffer:sampleBuffer];
                if (!success) {
                    NSLog(@"video write failed");
                }
            }
        }else if (mediaType == kCMMediaType_Audio){
            if (self.writerAudioInput.readyForMoreMediaData) {
                BOOL success = [self.writerAudioInput appendSampleBuffer:sampleBuffer];
                if (!success) {
                    NSLog(@"audio write failed");
                }
            }
        }
    }

}

//4.结束录制
// 4. Finish the recording: close the writer asynchronously and, on success,
// save the finished file to the photo library.
-(void)finishTakeVideo{
    dispatch_queue_t writeQueue = dispatch_queue_create("writeQueue", DISPATCH_QUEUE_CONCURRENT);
    MJWeakSelf
    dispatch_async(writeQueue, ^{
        // Promote to a strong reference for the duration of the teardown
        // (the original mixed weakSelf and a strong `self` capture).
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf == nil) {
            return;
        }
        if (strongSelf.writer.status != AVAssetWriterStatusWriting) {
            return;
        }
        // Close the gate BEFORE finishing: appending to a writer after
        // -finishWriting… throws, and leaving the flag YES also broke the
        // next recording (its writer never got -startWriting).
        strongSelf.canWritting = NO;
        [strongSelf.writer finishWritingWithCompletionHandler:^{
            // Done.
            YWLog(@"完成操作");

            // The finished file at preVideoURL can now be played, exported,
            // etc.; here it is saved to the photo library.
            PHPhotoLibrary *photoLibrary = [PHPhotoLibrary sharedPhotoLibrary];
            [photoLibrary performChanges:^{
                [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:strongSelf.preVideoURL];
            } completionHandler:^(BOOL success, NSError * _Nullable error) {
                if (success) {
                    NSLog(@"已将视频保存至相册");
                } else {
                    NSLog(@"未能保存视频到相册");
                }
            }];
        }];
    });
}

@end
