//
//  YTCameraManager.m
//  YUNT
//
//  Created by 梁炜东 on 2017/11/13.
//  Copyright © 2017年 梁炜东. All rights reserved.
//

#import "YTCameraManager.h"
#import "GPUImage.h"
#import "LFGPUImageBeautyFilter.h"
#import <MediaPlayer/MediaPlayer.h>
#import <AVKit/AVKit.h>

#define kMoviePath @"kMoviePath"
#define kMovieTime @"kMovieTime"
#define kMovieSpeed @"kMovieSpeed"
#define kMovieIndex @"kMovieIndex"
#define kMovieSpeed_Normal @"kMovieSpeed_Normal"

//#define kMovieSpeed_Fast @"kMovieSpeed_Fast"
//#define kMovieSpeed_Slow @"kMovieSpeed_Slow"
// 分段视频临时存储路径
#define kCachePath NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).lastObject
#define kPhotoDelayPath [NSString stringWithFormat:@"%@/camera/delay/", NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).lastObject]

@interface YTCameraManager()<GPUImageVideoCameraDelegate>
/*
 非camera相关
 */
@property(nonatomic, strong)UIView *videoView;
@property (nonatomic, strong) AVCaptureDeviceFormat *defaultFormat;
@property (nonatomic) CMTime defaultMinFrameDuration;
@property (nonatomic) CMTime defaultMaxFrameDuration;

//******** GPUImage Property ***********
//大基类
@property(nonatomic, strong)GPUImageOutput<GPUImageInput> *baseFilter;

@property (nonatomic, strong) GPUImageStillCamera *videoCamera;
//@property (nonatomic, strong) GPUImageFilterGroup *normalFilter;
@property (nonatomic, strong) GPUImageFilterGroup *imageFilter;

@property (nonatomic, strong) GPUImageMovieWriter *movieWriter;
//@property (nonatomic, strong) LFGPUImageBeautyFilter *leveBeautyFilter;
@property (nonatomic, strong) GPUImageView *cameraView;



@property (nonatomic, strong) GPUImageExposureFilter *exposureFiler;
@property(nonatomic, strong) dispatch_source_t timingTimer;
@property (nonatomic, strong) UIImageView *exposoursImageView;

@property(nonatomic, strong)NSString *movieOutputFilePath;

@property(nonatomic, strong)dispatch_source_t delayTimer;//延时拍照计时器
@end

@implementation YTCameraManager
#pragma mark - setter&getter
/// Returns the process-wide shared camera manager (created once, thread-safe).
+ (YTCameraManager *)shareModel
{
    static YTCameraManager *sharedInstance;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedInstance = [[self alloc] init];
    });
    return sharedInstance;
}
/// Lazy preview view; aspect-fill so the live feed covers the whole surface.
- (GPUImageView *)cameraView
{
    if (_cameraView == nil) {
        GPUImageView *preview = [[GPUImageView alloc] init];
        preview.fillMode = kGPUImageFillModePreserveAspectRatioAndFill;
        _cameraView = preview;
    }
    return _cameraView;
}
//- (GPUImageFilterGroup *)normalFilter {
//    if (!_normalFilter) {
//        GPUImageFilter *filter = [[GPUImageFilter alloc] init]; //默认
//        _normalFilter = [[GPUImageFilterGroup alloc] init];
//        [(GPUImageFilterGroup *) _normalFilter setInitialFilters:[NSArray arrayWithObject: filter]];
//        [(GPUImageFilterGroup *) _normalFilter setTerminalFilter:filter];
//    }
//    return _normalFilter;
//}
/// Lazy filter group holding the current camera -> preview filter chain.
/// It is reset to nil and recreated empty whenever the chain is rebuilt.
- (GPUImageFilterGroup *)imageFilter
{
    if (_imageFilter == nil) {
        _imageFilter = [[GPUImageFilterGroup alloc] init];
    }
    return _imageFilter;
}
//- (LFGPUImageBeautyFilter *)leveBeautyFilter {
//    if (!_leveBeautyFilter) {
//        _leveBeautyFilter = [[LFGPUImageBeautyFilter alloc] init];
//    }
//    return _leveBeautyFilter;
//}
/// Lazy still camera: 720p session preset, back-facing, output orientation
/// matched to the current status bar, front-camera preview mirrored.
- (GPUImageStillCamera *)videoCamera {
    if (_videoCamera == nil) {
        GPUImageStillCamera *camera =
            [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720
                                                cameraPosition:AVCaptureDevicePositionBack];
        camera.outputImageOrientation = [UIApplication sharedApplication].statusBarOrientation;
        camera.horizontallyMirrorFrontFacingCamera = YES;
        camera.delegate = self;
        _videoCamera = camera;
    }
    return _videoCamera;
}

/// Current flash mode of the capture device, or AVCaptureFlashModeOff (0)
/// when the device cannot be locked.
- (AVCaptureFlashMode)captureFlashMode
{
    NSError *error;
    if ([self.videoCamera.inputCamera lockForConfiguration:&error])
    {
        AVCaptureFlashMode mode = self.videoCamera.inputCamera.flashMode;
        // Balance the lock — the original acquired it and never released it,
        // leaving the device locked for configuration.
        [self.videoCamera.inputCamera unlockForConfiguration];
        return mode;
    }
    return AVCaptureFlashModeOff; // == 0, same fallback as before
}
/// Current torch mode of the capture device, or AVCaptureTorchModeOff (0)
/// when the device cannot be locked.
- (AVCaptureTorchMode)captureTorchMode
{
    NSError *error;
    if ([self.videoCamera.inputCamera lockForConfiguration:&error])
    {
        AVCaptureTorchMode mode = self.videoCamera.inputCamera.torchMode;
        // Balance the lock — the original acquired it and never released it.
        [self.videoCamera.inputCamera unlockForConfiguration];
        return mode;
    }
    return AVCaptureTorchModeOff; // == 0, same fallback as before
}
/// Lazy overlay image view used to display the multi-exposure composite.
- (UIImageView *)exposoursImageView
{
    if (_exposoursImageView == nil) {
        _exposoursImageView = [[UIImageView alloc] init];
    }
    return _exposoursImageView;
}

#pragma mark - init
/// Designated initializer: registers for the app-lifecycle notifications
/// used to save/restore torch state.
-(instancetype)init
{
    if ((self = [super init]) != nil)
    {
        [self initNotification];
    }
    return self;
}
/// Observes background/foreground transitions so the torch mode can be
/// persisted on exit and restored on return.
-(void)initNotification
{
    [NOTIFICATION_CENTER addObserver:self
                            selector:@selector(p_applicationDidEnterBackground)
                                name:UIApplicationDidEnterBackgroundNotification
                              object:nil];
    [NOTIFICATION_CENTER addObserver:self
                            selector:@selector(p_applicationWillEnterForeground)
                                name:UIApplicationWillEnterForegroundNotification
                              object:nil];
}
#pragma mark notification
/// Persists the current torch mode so it can be restored when the app
/// re-enters the foreground. (The original began with a bare
/// `[self.videoCamera.inputCamera torchMode];` whose result was discarded —
/// a no-op, removed.)
-(void)p_applicationDidEnterBackground
{
    [UserDefaults setInteger:[self.videoCamera.inputCamera torchMode] forKey:@"torchMode"];
    [UserDefaults synchronize];
}
/// Restores the torch mode saved when the app was backgrounded, but only
/// if it differs from the device's current mode.
-(void)p_applicationWillEnterForeground
{
    NSInteger savedMode = [UserDefaults integerForKey:@"torchMode"];
    if (savedMode != [self.videoCamera.inputCamera torchMode])
    {
        [self torchLight:savedMode];
    }
}
/// Installs the live preview into `videoView` (filling its bounds) and
/// brings up the capture device.
-(void)configInitView:(UIView *)videoView
{
    _videoView = videoView;
    self.cameraView.frame = videoView.bounds;
    [videoView addSubview:self.cameraView];
    [self initCameraDevice];
}
/// Re-fits the preview to its host view and propagates the current UI
/// orientation to the camera output. Uses the ivar directly so a not-yet
/// created camera is not lazily instantiated here.
-(void)configOrientation
{
    self.cameraView.frame = _videoView.bounds;
    if (_videoCamera != nil)
    {
        _videoCamera.outputImageOrientation =
            [UIApplication sharedApplication].statusBarOrientation;
    }
}
#pragma mark private method
/// Snapshots the device's default format and frame-duration limits (for a
/// later restore), applies the default 1080p resolution and the current
/// beauty setting, then starts capturing.
-(void)initCameraDevice
{
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    _defaultFormat = camera.activeFormat;
    _defaultMinFrameDuration = camera.activeVideoMinFrameDuration;
    _defaultMaxFrameDuration = camera.activeVideoMaxFrameDuration;

    publicModel.resolution = YTVideoResolution1080;
    // Build the beauty/normal filter chain before starting capture.
    [self BeautyFilter:publicModel.isBeauty];
    [self.videoCamera startCameraCapture];
}
/// Turns video stabilization on (auto) or off, but only when the active
/// format supports the requested mode.
- (void)isSupportedStabilization:(BOOL)open
{
    AVCaptureVideoStabilizationMode desiredMode =
        open ? AVCaptureVideoStabilizationModeAuto : AVCaptureVideoStabilizationModeOff;
    if ([self.videoCamera.inputCamera.activeFormat isVideoStabilizationModeSupported:desiredMode])
    {
        self.videoCamera.videoCaptureConnection.preferredVideoStabilizationMode = desiredMode;
    }
}
/// Persists the beauty flag and rebuilds the filter chain, keeping the
/// currently selected video filter.
- (void)BeautyFilter:(BOOL)isBeauty
{
    publicModel.isBeauty = isBeauty;
    [self addBeauty:isBeauty videlFilter:publicModel.videoFilter];
}
/// Rebuilds the camera -> imageFilter -> preview chain from scratch.
/// When `beauty` is set, the chain starts with the beauty filter followed by
/// the optional color filter; otherwise a single pass-through filter is used.
- (void)addBeauty:(BOOL)beauty videlFilter:(YTVideoFilter)filter
{
    [self.imageFilter removeAllTargets];
    self.imageFilter = nil; // lazily recreated empty by the getter
    [self.videoCamera removeAllTargets];
    if (beauty) {
        LFGPUImageBeautyFilter *beautyFilter = [[LFGPUImageBeautyFilter alloc] init];
        [self addGPUImageFilter:beautyFilter];
        if (filter == YTRejuvenationFilter) {
            [self addGPUImageFilter:[[GPUImageToneCurveFilter alloc] init]];
        } else if (filter == YTGenialFilter) {
            [self addGPUImageFilter:[[GPUImageSepiaFilter alloc] init]];
        } else if (filter == YTMidnightFilter) {
            [self addGPUImageFilter:[[GPUImageGrayscaleFilter alloc] init]];
        } else if (filter == YTSunshineFilter) {
            [self addGPUImageFilter:[[GPUImageColorInvertFilter alloc] init]];
        }
    } else {
        // Pass-through (default) chain. The original named this local
        // `filter`, shadowing the YTVideoFilter parameter — renamed.
        GPUImageFilter *passthroughFilter = [[GPUImageFilter alloc] init];
        [self.imageFilter setInitialFilters:@[passthroughFilter]];
        [self.imageFilter setTerminalFilter:passthroughFilter];
    }
    [self.videoCamera addTarget:self.imageFilter];
    [self.imageFilter addTarget:self.cameraView];
    [self.imageFilter useNextFrameForImageCapture];
}
/// Appends `filter` to the end of self.imageFilter's internal chain, keeping
/// the group's initialFilters/terminalFilter bookkeeping consistent so the
/// group behaves as one composite filter.
- (void)addGPUImageFilter:(GPUImageFilter *)filter{
    
    [self.imageFilter addFilter:filter];
    
    GPUImageOutput<GPUImageInput> *newTerminalFilter = filter;
    
    NSInteger count = self.imageFilter.filterCount;
    
    if (count == 1)
    {
        // First filter added: it is both the head and the tail of the chain.
        // set the initial filter
        self.imageFilter.initialFilters = @[newTerminalFilter];
        // set the terminal filter
        self.imageFilter.terminalFilter = newTerminalFilter;
        
    } else
    {
        // Chain the new filter after the current tail; the head stays the same.
        GPUImageOutput<GPUImageInput> *terminalFilter    = self.imageFilter.terminalFilter;
        NSArray *initialFilters                          = self.imageFilter.initialFilters;
        
        [terminalFilter addTarget:newTerminalFilter];
        
        // keep the original head as the sole initial filter
        self.imageFilter.initialFilters = @[initialFilters[0]];
        // the newly added filter becomes the tail
        self.imageFilter.terminalFilter = newTerminalFilter;
    }
}

/// Builds a fresh GPUImageMovieWriter sized/framed for the current
/// publicModel.resolution, pointing at a new file under the caches
/// directory, and wires the camera's audio into it.
/// NOTE(review): per-resolution bitrate settings are commented out below —
/// only the expected source frame rate is applied; confirm this is intended.
- (void)createNewWritter {
    
    // Resolution -> (width, height, expected fps). Defaults to 720p/60.
    CGFloat width = 720,height = 1280.0,per = 60;
    if (publicModel.resolution == YTVideoResolution2160){
        width = 2160;
        height = 3840.0;
        per = 30;
    }else if (publicModel.resolution == YTVideoResolution1080) {
        width = 1080.0;
        height = 1920.0;
        per = 60;
    }else if (publicModel.resolution == YTVideoResolution720){
        width = 720;
        height = 1280.0;
        per = 30;
    }else if (publicModel.resolution == YTVideoResolution480){
        width = 480;
        height = 640.0;
        per = 30;
    }
    
    
    NSDictionary *videoCompressionProps;
    /*
    switch (publicModel.resolution){
        case YTVideoResolution2160:
            videoCompressionProps = @{
                                      AVVideoAverageBitRateKey:@(50*1024.0*1024),
                                      AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC,
                                      AVVideoMaxKeyFrameIntervalKey:@(30),
                                      AVVideoAllowFrameReorderingKey:@NO,
                                      AVVideoExpectedSourceFrameRateKey:@30
                                      };
            break;
        case YTVideoResolution1080:{
            videoCompressionProps = @{
                                      AVVideoAverageBitRateKey:@(18*1024.0*1024),
                                      AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC
                                      };
        }
            break;
        case YTVideoResolution720:
            videoCompressionProps = @{
                                      AVVideoAverageBitRateKey:@(8*1024.0*1024),
                                      AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC
                                      };
            break;
        default:
            break;
            
    }
    */
    // Segment files are written to the caches directory and moved to
    // kVideoPath when recording finishes (see -pause).
    _movieOutputFilePath = [kCachePath stringByAppendingPathComponent:[self movieName]];
    videoCompressionProps = @{
                              AVVideoExpectedSourceFrameRateKey:@(per)
                              };
    NSDictionary * videoSettings =@{AVVideoCodecKey:AVVideoCodecH264,
                                    AVVideoWidthKey:@(width),
                                    AVVideoHeightKey:@(height),
                                    AVVideoScalingModeKey:AVVideoScalingModeResizeAspectFill,
                                    AVVideoCompressionPropertiesKey:videoCompressionProps
                                    };
    // fileType == 1 selects MP4; anything else writes a QuickTime .mov.
    NSString *fileType = AVFileTypeQuickTimeMovie;
    if (publicModel.fileType == 1) {
        fileType = AVFileTypeMPEG4;
    }
    _movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:[NSURL fileURLWithPath:_movieOutputFilePath] size:CGSizeMake(width, height) fileType:fileType outputSettings:videoSettings];
    /// Without this call the first recorded frame flashes black.
    [_videoCamera addAudioInputsAndOutputs];
    _videoCamera.audioEncodingTarget = _movieWriter;
}
#pragma mark 切换摄像头
/// Flips between front and back cameras: pauses the preview, rotates off
/// the main thread, then resumes capture.
- (void)switchCamera
{
    [self.videoCamera pauseCameraCapture];
    dispatch_queue_t backgroundQueue = dispatch_get_global_queue(0, 0);
    dispatch_async(backgroundQueue, ^{
        [self.videoCamera rotateCamera];
        [self.videoCamera resumeCameraCapture];
    });
}
#pragma mark - 点击屏幕对焦
/// Tap-to-focus: converts `point` from preview-view coordinates to the
/// device's point-of-interest space (0..1, landscape-oriented) and applies
/// auto focus/exposure there.
/// The original also grabbed an outer lockForConfiguration here and never
/// released it — redundant (the callee takes its own lock) and a leak; removed.
-(void)clickScreenFoucs:(CGPoint)point
{
    CGSize frameSize = self.cameraView.layer.frame.size;
    CGPoint location = point;
    if ([self.videoCamera.inputCamera position] == AVCaptureDevicePositionFront) {
        // Front camera preview is mirrored horizontally.
        location.x = frameSize.width - location.x;
    }
    CGPoint pointOfInterest = CGPointMake(location.y / frameSize.height,
                                          1.f - (location.x / frameSize.width));
    [self focusWithMode:AVCaptureFocusModeAutoFocus
           exposureMode:AVCaptureExposureModeAutoExpose
                atPoint:pointOfInterest];
}
/// Applies focus/exposure mode and point-of-interest on the capture device.
/// Fixes two defects in the original: it hard-coded
/// AVCaptureFocusModeAutoFocus / AVCaptureExposureModeAutoExpose (ignoring
/// its own parameters), and it never released the configuration lock.
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point
{
    NSError *error;
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    if ([camera lockForConfiguration:&error])
    {
        if ([camera isFocusModeSupported:focusMode]) [camera setFocusMode:focusMode];
        if ([camera isFocusPointOfInterestSupported]) [camera setFocusPointOfInterest:point];
        if ([camera isExposureModeSupported:exposureMode]) [camera setExposureMode:exposureMode];
        if ([camera isExposurePointOfInterestSupported]) [camera setExposurePointOfInterest:point];
        [camera unlockForConfiguration];
    }else
    {
        NSLog(@"focusWithMode: lockForConfiguration failed: %@", error);
    }
}
#pragma mark 录像
/// Starts a new recording: builds a writer sized to the current resolution,
/// attaches it to the live filter chain, and begins encoding.
-(void)record
{
    [self createNewWritter];
    [self.imageFilter addTarget:self.movieWriter];
    [self.movieWriter startRecording];
}
#pragma mark - 暂停
/// Stops the current recording and moves the finished clip into kVideoPath.
/// Bug fix: -record attaches the writer to imageFilter, but the original
/// detached it from baseFilter — which is never assigned anywhere in this
/// file, so the removeTarget was a nil no-op and the writer stayed attached.
/// baseFilter is still messaged (nil-safe) in case an unseen path sets it.
- (void)pause
{
    [self.baseFilter removeTarget:self.movieWriter];
    [self.imageFilter removeTarget:self.movieWriter];
    [self.movieWriter finishRecordingWithCompletionHandler:^{
        NSError *error = nil;
        BOOL isDir;
        // Ensure the destination directory exists before moving the clip.
        if (![FileManager fileExistsAtPath:kVideoPath isDirectory:&isDir])
        {
            [FileManager createDirectoryAtPath:kVideoPath withIntermediateDirectories:YES attributes:nil error:nil];
        }
        // Use the same "dir/file" form for logging and moving (the original
        // log omitted the path separator).
        NSString *destination = [NSString stringWithFormat:@"%@/%@", kVideoPath, [_movieOutputFilePath lastPathComponent]];
        NSLog(@"%@", destination);
        // Check the move's return value, not just the error pointer.
        if (![FileManager moveItemAtPath:_movieOutputFilePath toPath:destination error:&error])
        {
            NSLog(@"移动失败: %@", error);
        }
    }];
}


#pragma mark 拍照
/// Captures a JPEG through the current filter chain and writes it under
/// kPhotoPath. Bug fix: the original discarded writeToURL:'s return value,
/// silently losing photos on write failure.
-(void)takePhoto
{
    [self.videoCamera capturePhotoAsJPEGProcessedUpToFilter:self.imageFilter withOrientation:UIImageOrientationUp withCompletionHandler:^(NSData *processedJPEG, NSError *error)
     {
         if (error){ return; }
         BOOL isDir;
         // Create the photo directory on first use.
         if (![FileManager fileExistsAtPath:kPhotoPath isDirectory:&isDir])
         {
             [FileManager createDirectoryAtPath:kPhotoPath withIntermediateDirectories:YES attributes:nil error:nil];
         }
         NSString *file = [NSString stringWithFormat:@"%@/%@", kPhotoPath, [self photoName]];
         NSError *writeError = nil;
         // Check the return value, not the error pointer.
         if (![processedJPEG writeToURL:[NSURL fileURLWithPath:file] options:NSDataWritingAtomic error:&writeError])
         {
             NSLog(@"照片保存失败: %@", writeError);
         }
     }];
}
#pragma mark 设置iso时间
/// Locks exposure at a custom ISO. `iso` is a normalized 0..1 value that is
/// interpolated into the active format's [minISO, maxISO] range; the
/// exposure duration is left unchanged.
-(void)setIso:(CGFloat)iso
{
    NSError *error;
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    if ([camera lockForConfiguration:&error]) {
        CGFloat lower = camera.activeFormat.minISO;
        CGFloat upper = camera.activeFormat.maxISO;
        CGFloat targetISO = lower + (upper - lower) * iso;
        [camera setExposureModeCustomWithDuration:AVCaptureExposureDurationCurrent
                                              ISO:targetISO
                                completionHandler:nil];
        [camera unlockForConfiguration];
    } else {
        // Device could not be locked for configuration; nothing to do.
    }
}
#pragma mark 设置曝光时间
/// Locks exposure at a custom duration. `exposure` appears to be a 0..7
/// slider value mapped onto the supported duration span.
/// Fix: removed the leftover empty NSLog(@"") debug statements (including
/// the one inside the completion handler, replaced by nil).
/// NOTE(review): the mapping scales from 0 rather than from
/// minExposureDuration, and ISO is pinned at 38 — both preserved from the
/// original; confirm against product requirements.
-(void)setExposure:(CGFloat)exposure
{
    NSError *error;
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    if ([camera lockForConfiguration:&error]) {
        CMTime minExposure = camera.activeFormat.minExposureDuration;
        CMTime maxExposure = camera.activeFormat.maxExposureDuration;
        // Convert CMTime bounds to seconds for the interpolation.
        float minDurationFloat = CMTimeGetSeconds(minExposure);
        float maxDurationFloat = CMTimeGetSeconds(maxExposure);
        float span = maxDurationFloat - minDurationFloat;
        float curDurationFloat = exposure * span / 7.0;
        // Back to CMTime with microsecond precision.
        CMTime exposureTime = CMTimeMake(curDurationFloat * 1000000, 1000000);
        [camera setExposureModeCustomWithDuration:exposureTime ISO:38 completionHandler:nil];
        [camera unlockForConfiguration];
    } else {
        // Device could not be locked for configuration; nothing to do.
    }
}
#pragma mark - 多重曝光
/// Multi-exposure capture. Swaps the normal chain for a single exposure
/// filter, then a 1 Hz GCD timer runs for 60 ticks: each tick brightens the
/// live exposure, plays a shutter sound, captures a frame, and
/// lighten-blends it into a running composite. `handler` receives
/// (elapsed tick count, composite image) after each blend and once more
/// when the countdown finishes.
-(void)setMoreExposureWithCount:(EventTwoHandler)handler
{
    // Overlay view that displays the evolving composite on top of the preview.
    [self.cameraView addSubview:self.exposoursImageView];
    self.exposoursImageView.frame = self.cameraView.bounds;
    // Tear down the normal chain and route the camera through a bare
    // exposure filter instead.
    [self.imageFilter removeAllTargets];
    self.imageFilter = nil;
    [self.videoCamera removeAllTargets];
    self.exposureFiler = [[GPUImageExposureFilter alloc] init];
    self.exposureFiler.exposure = 0;
    [self.videoCamera addTarget:self.exposureFiler];
    [self.exposureFiler addTarget:self.cameraView];
    __block NSInteger timeout= 60; // countdown, in 1-second ticks
    __block UIImage *exposureImage; // running lighten-blend composite
    dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    self.timingTimer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0,queue);
    dispatch_source_set_timer(self.timingTimer,dispatch_walltime(NULL, 0),1.0*NSEC_PER_SEC, 0); // fires every second
    dispatch_source_set_event_handler(self.timingTimer, ^{
        
        if(timeout<=0){ // countdown finished — stop and deliver the final image
            [self endMoreExposure];
            dispatch_async(dispatch_get_main_queue(), ^{
                self.exposoursImageView.image = exposureImage;
                BlockCallWithTwoArg(handler, @(60 -timeout), exposureImage);
            });
        }else{
            // Each tick raises the live exposure a notch before capturing.
            self.exposureFiler.exposure += 0.3;
             SystemSoundID soundID = 0;
            // NOTE(review): soundID is a fresh local every tick, so this
            // guard always passes — a new SystemSoundID is created per tick
            // and never disposed (AudioServicesDisposeSystemSoundID). Looks
            // like a leak; confirm and cache the sound once instead.
            if (soundID == 0) {
                NSString *path = [[NSBundle mainBundle] pathForResource:@"photoShutter2" ofType:@"caf"];
                NSURL *filePath = [NSURL fileURLWithPath:path isDirectory:NO];
                AudioServicesCreateSystemSoundID((__bridge CFURLRef)filePath, &soundID);
            }
            AudioServicesPlaySystemSound(soundID);
            [self.videoCamera capturePhotoAsImageProcessedUpToFilter:self.exposureFiler withCompletionHandler:^(UIImage *processedImage, NSError *error) {
                if(error){
                    return;
                }
                if (!exposureImage) {
                    // First frame seeds the composite.
                    exposureImage = processedImage;
                }else{
                    // Lighten-blend the new frame into the composite:
                    // brighter pixels win, accumulating light over time.
                    GPUImagePicture *stillImageSource1 = [[GPUImagePicture alloc] initWithImage:processedImage];
                    
                    GPUImagePicture *stillImageSource2 = [[GPUImagePicture alloc] initWithImage:exposureImage];
                    GPUImageLightenBlendFilter *blendFilter = [[GPUImageLightenBlendFilter alloc] init];
                    [stillImageSource1 processImage];
                    [stillImageSource1 addTarget:blendFilter];
                    [stillImageSource2 addTarget:blendFilter];
                    [stillImageSource2 processImage];
                    [blendFilter useNextFrameForImageCapture];
                    exposureImage = [blendFilter imageFromCurrentFramebuffer];
                    // Push the updated composite to the UI on the main queue.
                    dispatch_async(dispatch_get_main_queue(), ^{
                        self.exposoursImageView.image = exposureImage;
                        BlockCallWithTwoArg(handler, @(60 - timeout), exposureImage);
                    });
//                    UIImageWriteToSavedPhotosAlbum(exposureImage, nil, nil, nil);
                }
                
            }];

        }
        timeout --;
    });
    dispatch_resume(self.timingTimer);
}
/// Tears down the multi-exposure session: cancels the timer, restores the
/// default pass-through filter chain, saves the composite (if any) as a
/// JPEG under kPhotoPath, and removes the overlay image view.
/// No-op when the timer is not running.
- (void)endMoreExposure
{
    if (_timingTimer) {
        dispatch_source_cancel(_timingTimer);
        _timingTimer = nil;
        // Detach the exposure filter and rebuild the normal preview chain.
        [self.exposureFiler removeAllTargets];
        [self.videoCamera removeAllTargets];
        self.exposureFiler = nil;
        GPUImageFilter *filter = [[GPUImageFilter alloc] init]; // default pass-through
        [self.imageFilter setInitialFilters:[NSArray arrayWithObject:filter]];
        [self.imageFilter setTerminalFilter:filter];
        [self.videoCamera addTarget:self.imageFilter];
        [self.imageFilter addTarget:self.cameraView];
        [self.imageFilter useNextFrameForImageCapture];
        if (self.exposoursImageView.image) {
        BOOL isDir;
        // Create the photo directory on first use.
        if (![FileManager fileExistsAtPath:kPhotoPath isDirectory:&isDir])
        {
            [FileManager createDirectoryAtPath:kPhotoPath withIntermediateDirectories:YES attributes:nil error:nil];
        }
        NSString *file = [NSString stringWithFormat:@"%@/%@", kPhotoPath, [self photoName]];
        NSError *errorhaha;
        // NOTE(review): the write's return value and error are discarded —
        // a failed save is silent; consider logging as -takePhoto should.
        [UIImageJPEGRepresentation(self.exposoursImageView.image, 1)  writeToURL:[NSURL fileURLWithPath:file] options:NSDataWritingAtomic error:&errorhaha];
        }
        [self.exposoursImageView removeFromSuperview];
        self.exposoursImageView = nil;
    }

}
#pragma mark 打开闪光灯
/// Sets the device flash mode if supported.
/// Fix: the original acquired the configuration lock and never released it.
- (void)flashLight:(AVCaptureFlashMode)flashMode
{
    NSError *error;
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    if ([camera lockForConfiguration:&error])
    {
        if ([camera isFlashModeSupported:flashMode]){
            [camera setFlashMode:flashMode];
        }else{
            NSLog(@"该设备不支持闪关灯");
        }
        [camera unlockForConfiguration]; // balance the lock
    }else{
        NSLog(@"请打开相机");
    }
}
#pragma mark - 手电筒
/// Sets the device torch mode if supported.
/// Fix: the original acquired the configuration lock and never released it.
- (void)torchLight:(AVCaptureTorchMode)torchMode
{
    NSError *error;
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    if ([camera lockForConfiguration:&error])
    {
        if ([camera isTorchModeSupported:torchMode]){
            [camera setTorchMode:torchMode];
        }else{
            NSLog(@"该设备不支持手电筒");
        }
        [camera unlockForConfiguration]; // balance the lock
    }else{
        NSLog(@"请打开相机");
    }
}
#pragma mark 设置hdr开关
/// Toggles video HDR: turning it on disables automatic HDR adjustment and
/// forces HDR when the active format supports it.
/// Fixes: released the configuration lock (the original leaked it) and
/// removed the stray `;` after the method signature.
-(void)setHDR:(BOOL)isOpen
{
    NSError *error;
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    if ([camera lockForConfiguration:&error])
    {
        // Manual control requires auto-adjustment off when forcing HDR on.
        [camera setAutomaticallyAdjustsVideoHDREnabled:!isOpen];
        if (camera.activeFormat.isVideoHDRSupported) {
            [camera setVideoHDREnabled:isOpen];
        }
        [camera unlockForConfiguration]; // balance the lock
    }
}
#pragma mark 设置白平衡
/// Applies the white-balance preset selected in publicModel.whiteBalance:
/// 0 = continuous auto, otherwise a locked color temperature in Kelvin.
/// Fixes: released the configuration lock (the original leaked it) and
/// removed the stray debug NSLog at the top.
-(void)whiteBlance
{
    NSError *error;
    AVCaptureDevice *camera = self.videoCamera.inputCamera;
    if ([camera lockForConfiguration:&error])
    {
        // Preset index -> color temperature (Kelvin); 0 means "auto".
        float temperature = 0;
        switch (publicModel.whiteBalance) {
            case 1: temperature = 2600; break;
            case 2: temperature = 4900; break;
            case 3: temperature = 6400; break;
            case 4: temperature = 6800; break;
            default: temperature = 0;   break;
        }
        float tint = 0;
        if (temperature == 0)
        {
            if ([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance])
            {
                [camera setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
            }
        }else
        {
            AVCaptureWhiteBalanceTemperatureAndTintValues temperatureAndTintValue ={
                temperature ,
                tint
            };
            // NOTE(review): the computed gains should arguably be clamped to
            // [1, maxWhiteBalanceGain] before locking — confirm on device.
            AVCaptureWhiteBalanceGains whiteBalanceGains = [camera deviceWhiteBalanceGainsForTemperatureAndTintValues:temperatureAndTintValue];
            [camera setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:whiteBalanceGains completionHandler:nil];
        }
        [camera unlockForConfiguration]; // balance the lock
    }
}
#pragma mark 设置视频分辨率
/// Persists the chosen resolution and switches the capture session to the
/// matching preset when the session supports it.
-(void)setVideoResolution:(YTVideoResolution)resolution
{
    publicModel.resolution = resolution;
    NSString *preset = nil;
    if (resolution == YTVideoResolution480) {
        preset = AVCaptureSessionPreset640x480;
    } else if (resolution == YTVideoResolution720) {
        preset = AVCaptureSessionPreset1280x720;
    } else if (resolution == YTVideoResolution1080) {
        preset = AVCaptureSessionPreset1920x1080;
    } else if (resolution == YTVideoResolution2160) {
        preset = AVCaptureSessionPreset3840x2160;
    }
    if (preset != nil && [self.videoCamera.captureSession canSetSessionPreset:preset])
    {
        self.videoCamera.captureSession.sessionPreset = preset;
    }
}
#pragma mark 设置视频分辨率的帧率
/// Applies the requested frame rate by pinning min/max frame duration to
/// 1/fps, but only when some supported range of the active format contains
/// it. Fixes: removed the per-range debug NSLog and the dead commented-out
/// code at the top; added an early break once a matching range is found.
-(void)setVideoResolutionFps:(int)fps
{
    NSError *error;
    CMTime frameDuration = CMTimeMake(1, fps);
    NSArray *supportedFrameRateRanges = [self.videoCamera.inputCamera.activeFormat videoSupportedFrameRateRanges];
    BOOL frameRateSupported = NO;
    for (AVFrameRateRange *range in supportedFrameRateRanges)
    {
        if (CMTIME_COMPARE_INLINE(frameDuration, >=, range.minFrameDuration) &&
            CMTIME_COMPARE_INLINE(frameDuration, <=, range.maxFrameDuration))
        {
            frameRateSupported = YES;
            break; // one matching range is enough
        }
    }
    if (frameRateSupported && [self.videoCamera.inputCamera lockForConfiguration:&error])
    {
        [self.videoCamera.inputCamera setActiveVideoMaxFrameDuration:frameDuration];
        [self.videoCamera.inputCamera setActiveVideoMinFrameDuration:frameDuration];
        [self.videoCamera.inputCamera unlockForConfiguration];
    }
}
#pragma mark 设置视频滤镜
/// Remembers the selected video filter and rebuilds the filter chain,
/// preserving the current beauty setting.
-(void)setVideoFilter:(YTVideoFilter)filter
{
    publicModel.videoFilter = filter;
    [self addBeauty:publicModel.isBeauty videlFilter:filter];
}
#pragma mark 慢动作
/**
 [Slow-motion setup](https://developer.apple.com/library/mac/documentation/AVFoundation/Reference/AVCaptureDevice_Class/index.html):
 record above 60fps (e.g. 240fps) and write at 30fps so playback is
 stretched in time. This method only dispatches to the open/close helpers
 based on the current video mode.
 Fix: removed two large duplicated blocks of commented-out frame-rate
 probing code that the original carried here as dead weight.
 */
-(void)configureCameraForHighestFrameRate
{
    if (publicModel.videoModel == 0) {
        [self configureCameraForHighestFrameRateClose];
    }else if (publicModel.videoModel == 1) {
        [self configureCameraForHighestFrameRateOpen];
    }
}
// Enables slow-motion capture: switches the input device to the widest format
// whose supported frame-rate range covers 240fps, and pins both min and max
// frame duration to 1/240s. The capture session is restarted around the change.
// If no matching format exists, the device configuration is left untouched.
- (void)configureCameraForHighestFrameRateOpen
{
    const CGFloat desiredFPS = 240.0;
    [self.videoCamera.captureSession stopRunning];
    AVCaptureDevice *videoDevice = self.videoCamera.inputCamera;
    AVCaptureDeviceFormat *selectedFormat = nil;
    int32_t maxWidth = 0;
    for (AVCaptureDeviceFormat *format in [videoDevice formats]) {
        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
            CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
            // Prefer the widest format whose range covers the desired rate
            // (>= keeps the last equally-wide candidate, as before).
            if (range.minFrameRate <= desiredFPS && desiredFPS <= range.maxFrameRate &&
                dimensions.width >= maxWidth) {
                selectedFormat = format;
                maxWidth = dimensions.width;
            }
        }
    }
    if (selectedFormat) {
        NSError *error = nil;
        if ([videoDevice lockForConfiguration:&error]) {
            NSLog(@"selected format: %@", selectedFormat);
            videoDevice.activeFormat = selectedFormat;
            videoDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)desiredFPS);
            videoDevice.activeVideoMaxFrameDuration = CMTimeMake(1, (int32_t)desiredFPS);
            [videoDevice unlockForConfiguration];
        } else {
            // Previously the error was silently discarded (nil out-param).
            NSLog(@"lockForConfiguration failed: %@", error);
        }
    }
    [self.videoCamera.captureSession startRunning];
}
// Disables slow motion by restoring the format and frame durations that were
// captured into _defaultFormat/_defaultMin(Max)FrameDuration before slow
// motion was enabled.
//
// Bug fix: the original ran a 60fps format search whose result was discarded,
// yet restoration was gated on that search succeeding — if no 60fps format
// matched, the defaults were never restored; and if _defaultFormat was nil,
// setting a nil activeFormat would throw. Restore unconditionally whenever a
// default format was recorded.
- (void)configureCameraForHighestFrameRateClose
{
    [self.videoCamera.captureSession stopRunning];
    AVCaptureDevice *videoDevice = self.videoCamera.inputCamera;
    if (_defaultFormat) {
        NSError *error = nil;
        if ([videoDevice lockForConfiguration:&error]) {
            videoDevice.activeFormat = _defaultFormat;
            videoDevice.activeVideoMinFrameDuration = _defaultMinFrameDuration;
            videoDevice.activeVideoMaxFrameDuration = _defaultMaxFrameDuration;
            [videoDevice unlockForConfiguration];
        } else {
            NSLog(@"lockForConfiguration failed: %@", error);
        }
    }
    [self.videoCamera.captureSession startRunning];
}

#pragma mark 延时摄影
// Time-lapse capture: every publicModel.intervalTime seconds grab one still
// through the image filter chain and write it as a JPEG under kPhotoDelayPath.
// After `total` ticks the timer is cancelled and the stills are stitched into
// a movie by -p_compressioVideo.
-(void)takeDelayPhoto
{
    // NOTE(review): total = continueTime minutes * 60 counts ticks, which only
    // equals elapsed seconds when intervalTime == 1 — confirm intended semantics.
    CGFloat total = publicModel.continueTime * 60;
    __block CGFloat i = 0;

    self.delayTimer = [YTTools gcdTimer:publicModel.intervalTime];
    // The handler is stored on a timer that self retains — capture self weakly
    // so the object can deallocate while a time-lapse is pending.
    __weak typeof(self) weakSelf = self;
    dispatch_source_set_event_handler(self.delayTimer, ^{
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf) return;
        i++;
        if (i > total)
        {
            dispatch_cancel(strongSelf.delayTimer);
            strongSelf.delayTimer = nil;
            [strongSelf p_compressioVideo];
            // Bug fix: the original fell through here and captured one extra
            // photo after stitching had already started, leaving a stray file.
            return;
        }
        [strongSelf.videoCamera capturePhotoAsJPEGProcessedUpToFilter:strongSelf.imageFilter withOrientation:UIImageOrientationUp withCompletionHandler:^(NSData *processedJPEG, NSError *error)
         {
             if (error) {
                 NSLog(@"拍照出错了%@", error);
                 return;
             }
             // Lazily create the delay-photo directory on first capture.
             BOOL isDir;
             if (![FileManager fileExistsAtPath:kPhotoDelayPath isDirectory:&isDir])
             {
                 [FileManager createDirectoryAtPath:kPhotoDelayPath withIntermediateDirectories:YES attributes:nil error:nil];
             }
             NSString *file = [NSString stringWithFormat:@"%@/%@", kPhotoDelayPath, [strongSelf photoName]];
             NSError *writeError = nil;
             // Check the BOOL return, not the error pointer.
             if (![processedJPEG writeToURL:[NSURL fileURLWithPath:file] options:NSDataWritingAtomic error:&writeError])
             {
                 NSLog(@"%@", writeError);
             }
         }];
      });
    dispatch_resume(self.delayTimer);
}
// Stitches the JPEG stills saved under kPhotoDelayPath into a 320x480 H.264
// movie at 30fps under kVideoPath, then deletes the stills and dismisses the
// progress HUD.
//
// Bug fixes vs the original:
//  - the last still was always dropped (frame was incremented and the
//    termination check ran BEFORE the buffer was appended);
//  - the directory listing was used unsorted, so frames could be stitched out
//    of chronological order (file names are timestamps — see -photoName);
//  - an empty directory crashed on fileNameArr[0];
//  - the HUD was never dismissed on early failure paths;
//  - deprecated -finishWriting replaced by the async variant.
-(void)p_compressioVideo
{
    [SVProgressHUD showWithStatus:Language(@"合成中")];
    NSError *fError = nil;
    // Sort so the timestamp-named frames are appended chronologically.
    NSArray *fileNameArr = [[FileManager contentsOfDirectoryAtPath:kPhotoDelayPath error:&fError]
                            sortedArrayUsingSelector:@selector(compare:)];
    if (fileNameArr.count == 0)
    {
        // Nothing captured, or the listing failed — nothing to stitch.
        [SVProgressHUD dismiss];
        return;
    }
    BOOL isDir;
    if (![FileManager fileExistsAtPath:kVideoPath isDirectory:&isDir])
    {
        [FileManager createDirectoryAtPath:kVideoPath withIntermediateDirectories:YES attributes:nil error:nil];
    }
    CGSize size = CGSizeMake(320, 480);
    NSError *error = nil;
    NSURL *url = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@%@", kVideoPath, [NSString stringWithFormat:@"delay%@", [self movieName]]]];
    NSLog(@"%@", url);
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie error:&error];
    if (error || videoWriter == nil)
    {
        [SVProgressHUD dismiss];
        return;
    }
    // Output format: H.264, 320x480.
    NSDictionary *videoSettings = @{AVVideoCodecKey : AVVideoCodecH264,
                                    AVVideoWidthKey : @((int)size.width),
                                    AVVideoHeightKey : @((int)size.height)};
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    // The adaptor provides a CVPixelBufferPool; pooled buffers are cheaper
    // than allocating a fresh pixel buffer for every frame.
    NSDictionary *sourcePixelBufferAttributesDictionary = @{(__bridge id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB)};
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    if (![videoWriter canAddInput:writerInput])
    {
        [SVProgressHUD dismiss];
        return;
    }
    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    // Pull-model writing: AVFoundation calls the block whenever the input can
    // accept more media data.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if (frame >= (int)fileNameArr.count)
            {
                [writerInput markAsFinished];
                [videoWriter finishWritingWithCompletionHandler:^{}];
                // Delete the source stills once the movie is written.
                for (NSString *name in fileNameArr)
                {
                    NSString *filePath = [NSString stringWithFormat:@"%@%@", kPhotoDelayPath, name];
                    if (![FileManager removeItemAtPath:filePath error:nil])
                    {
                        NSLog(@"删除错误");
                    }
                }
                [SVProgressHUD dismiss];
                return;
            }
            UIImage *image = [UIImage imageWithContentsOfFile:[NSString stringWithFormat:@"%@%@", kPhotoDelayPath, fileNameArr[frame]]];
            if (image == nil)
            {
                // Unreadable/stray file — skip it rather than crash.
                frame++;
                continue;
            }
            // Pre-scale to the size pixelBufferFromCGImage: draws at.
            UIImage *newImage = [self imageWithImage:image scaledToSize:CGSizeMake(306, 408)];
            CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[newImage CGImage] size:size];
            if (buffer)
            {
                // Present each still for 1/30s of output video.
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 30)])
                {
                    NSLog(@"FAIL");
                }
                else
                {
                    NSLog(@"OK");
                }
                CFRelease(buffer);
            }
            frame++;
        }
    }];
}
// Creates a 32ARGB CVPixelBuffer of `size` and draws `image` into it.
// The caller owns the returned buffer (CFRelease when done); returns NULL on
// allocation/context failure. Note: the draw rect uses the image's own
// dimensions, so callers are expected to pre-scale (see -imageWithImage:scaledToSize:).
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = @{(__bridge id)kCVPixelBufferCGImageCompatibilityKey : @YES,
                              (__bridge id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    // The original relied on NSParameterAssert here, which is compiled out in
    // release builds and would let a NULL buffer fall through to a crash.
    if (status != kCVReturnSuccess || pxbuffer == NULL)
    {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's actual bytes-per-row: CoreVideo may pad rows for
    // alignment, in which case the original hard-coded 4*width would skew
    // every scanline.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(rgbColorSpace);
    if (context == NULL)
    {
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CVPixelBufferRelease(pxbuffer);
        return NULL;
    }
    // UIKit and CoreGraphics have flipped y-axes; drawing a UIImage-backed
    // CGImage here yields a vertically flipped frame unless compensated upstream.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
// Redraws `image` into an offscreen bitmap context of `newSize` and returns
// the resulting image. Scale factor is 1 (UIGraphicsBeginImageContext),
// matching the video pipeline's pixel-based sizes.
-(UIImage*)imageWithImage:(UIImage*)image scaledToSize:(CGSize)newSize
{
    UIGraphicsBeginImageContext(newSize);
    // Drawing into the full target rect performs the resize.
    [image drawInRect:(CGRect){CGPointZero, newSize}];
    UIImage *resized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return resized;
}
// Maps the device orientation to the AVCaptureVideoOrientation used when
// configuring the photo/video output connection.
// Bug fix: the original blindly cast the enum, which produced invalid
// AVCaptureVideoOrientation values for FaceUp(5), FaceDown(6) and Unknown(0);
// those now fall back to portrait.
-(AVCaptureVideoOrientation)avOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    switch (deviceOrientation)
    {
        case UIDeviceOrientationPortraitUpsideDown:
            return AVCaptureVideoOrientationPortraitUpsideDown;
        // Landscape is mirrored between the two enums: a device rotated left
        // needs video oriented right, and vice versa.
        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;
        case UIDeviceOrientationPortrait:
        default:
            // FaceUp / FaceDown / Unknown have no AV equivalent.
            return AVCaptureVideoOrientationPortrait;
    }
}
#pragma mark - 内部处理方法
// Movie file name derived from the current Unix timestamp, e.g. "1510560000.MOV".
- (NSString *)movieName {
    long timestamp = (long)[[NSDate date] timeIntervalSince1970];
    return [NSString stringWithFormat:@"%ld.MOV", timestamp];
}
// Photo file name derived from the current Unix timestamp, e.g. "1510560000.jpeg".
// Resolution is one second, so captures within the same second share a name.
-(NSString *)photoName
{
    long timestamp = (long)[[NSDate date] timeIntervalSince1970];
    return [NSString stringWithFormat:@"%ld.jpeg", timestamp];
}
#pragma mark - 录制代理AVCaptureFileOutputRecordingDelegate
// AVCaptureFileOutputRecordingDelegate: file recording started. Log-only.
// NOTE(review): the class extension only declares GPUImageVideoCameraDelegate;
// confirm this delegate is actually registered somewhere outside this chunk.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
    NSLog(@"开始录制");
}

// AVCaptureFileOutputRecordingDelegate: file recording finished. Log-only;
// the `error` parameter is currently ignored.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    NSLog(@"录制完成");
}
// GPUImageVideoCameraDelegate: called for every raw sample buffer the camera
// produces. The live exposure-preview mirroring below is currently disabled.
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
//    UIImage *image  = [self imageFromSampleBuffer:sampleBuffer];
//    dispatch_async(dispatch_get_main_queue(), ^{
//        self.exposoursImageView.image = image;
//
//    });
    
}
// Converts a camera sample buffer into a UIImage rotated for portrait display.
// Assumes the buffer is 32-bit BGRA — GPUImage's usual capture format —
// NOTE(review): confirm the session's pixel format before relying on colors.
// Fix: the previous flags (kCGBitmapByteOrder32Big | PremultipliedLast)
// describe RGBA and would swap the red/blue channels for a BGRA buffer; the
// little-endian/alpha-first combination (as in the commented-out variant the
// author left behind) matches BGRA. NULL buffer/context are now guarded.
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL)
    {
        return nil;
    }
    // Lock the base address while CoreGraphics reads the pixel data.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // BGRA == little-endian 32-bit with premultiplied alpha first.
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    UIImage *image = nil;
    if (context)
    {
        CGImageRef quartzImage = CGBitmapContextCreateImage(context);
        // Rotate right so the landscape-native frame displays upright.
        image = [UIImage imageWithCGImage:quartzImage scale:1.0f orientation:UIImageOrientationRight];
        CGImageRelease(quartzImage);
        CGContextRelease(context);
    }
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGColorSpaceRelease(colorSpace);
    return image;
}
@end
