//
//  HMCustomCamera.m
//  自定义相机工具类封装
//
//  Created by made on 17/3/15.
//  Copyright © 2017年 made. All rights reserved.
//

#import "HMCustomCamera.h"

#import <AVFoundation/AVFoundation.h>

/**
 #import <AVFoundation/AVCaptureDevice.h>
 #import <AVFoundation/AVCaptureInput.h>
 #import <AVFoundation/AVCaptureOutput.h>
 #import <AVFoundation/AVCaptureSession.h>
 #import <AVFoundation/AVCaptureVideoPreviewLayer.h>
 */

@interface HMCustomCamera ()

// Capture session that shuttles data from the input device to the outputs.
@property(nonatomic,strong)AVCaptureSession *captureSession;

// Active input device (camera / microphone).
@property(nonatomic,strong)AVCaptureDeviceInput *captureDeviceInput;

// Still-image output. NOTE(review): deprecated since iOS 10 in favour of
// AVCapturePhotoOutput; kept because it is part of this class's stored API.
@property(nonatomic,strong)AVCaptureStillImageOutput *captureStillImageOutput;

// Layer that renders the live camera feed.
@property(nonatomic,strong)AVCaptureVideoPreviewLayer *previewLayer;


// Host view for the preview layer, supplied via -initWithPreview:.
@property(nonatomic,strong)UIView *preView;

@end

@implementation HMCustomCamera

/// Designated initializer. Stores the host view for the camera preview and
/// builds the whole capture pipeline immediately.
/// @param preView The view that will host the live preview layer.
- (instancetype)initWithPreview:(UIView *)preView
{
    self = [super init];
    if (self) {
        // Ivar access in init per convention (avoid accessors before the
        // object is fully initialized).
        _preView = preView;
        // Wire up device -> input -> session -> output -> preview layer.
        [self setupSession];
    }
    return self;
}

// Build the capture environment (the four core AVFoundation objects:
// device input, still-image output, session, preview layer).
- (void)setupSession
{
    // 0. Find the back camera. Scan only video devices — +devices also returns
    //    microphones, whose position is unspecified. (Simulators have no
    //    camera hardware; test on a real device.)
    AVCaptureDevice *captureDevice = nil;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == AVCaptureDevicePositionBack) {
            captureDevice = device;
            break;
        }
    }

    // 1. Create the input. Check the error instead of discarding it; on
    //    failure (no camera, no permission) bail out rather than wiring up
    //    a dead session.
    NSError *error = nil;
    self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (self.captureDeviceInput == nil) {
        NSLog(@"Failed to create camera input: %@", error);
        return;
    }

    // 2. Still-image output (deprecated in iOS 10, kept to match this
    //    class's stored properties).
    self.captureStillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    // 3. The capture session itself.
    self.captureSession = [[AVCaptureSession alloc] init];
    // 4. Attach input and output, guarded by canAdd… (fails on broken
    //    hardware and on the simulator).
    if ([self.captureSession canAddInput:self.captureDeviceInput]) {
        [self.captureSession addInput:self.captureDeviceInput];
    }
    if ([self.captureSession canAddOutput:self.captureStillImageOutput]) {
        [self.captureSession addOutput:self.captureStillImageOutput];
    }

    // 5. Preview layer showing what the camera sees.
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];

    // 6. Insert at index 0 so the preview never covers sibling layers of
    //    the host view.
    [self.preView.layer insertSublayer:self.previewLayer atIndex:0];

    // A layer needs explicit geometry (it defaults to zero size and would be
    // invisible). `position` is expressed in the superlayer's (i.e. the host
    // view's own) coordinate space, so center the layer on the midpoint of
    // preView.bounds — the original used preView.center, which is in the
    // SUPERVIEW's coordinates and is wrong whenever preView's origin ≠ (0,0).
    self.previewLayer.bounds = self.preView.bounds;
    self.previewLayer.position = CGPointMake(CGRectGetMidX(self.preView.bounds),
                                             CGRectGetMidY(self.preView.bounds));

    // 6.1 The camera delivers screen-sized frames; the default gravity
    //     (aspect-fit) would letterbox them whenever preView's size differs
    //     from the screen. Aspect-fill shows the feed edge-to-edge without
    //     distortion.
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

    // 7. Start the session so the camera begins capturing.
    [self start];
}

// Take a photo: grab a frame from the still-image output, crop it to the
// preview view's size, and hand it to the caller.
//   completion     – invoked with the cropped image.
//   saveCompletion – if non-nil, the image is also written to the system
//                    photo album and this block receives the save result.
- (void)captureImageCompletion:(void(^)(UIImage *image))completion SaveAlbumBlock:(void(^)(NSError *error))saveCompletion
{
    // 0/1. Ask the output for its video connection directly. The original
    //      indexed connections[0], which crashes when the connections array
    //      is empty (e.g. on the simulator, or before the session is wired up).
    AVCaptureConnection *connection = [self.captureStillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    if (connection == nil) {
        NSLog(@"输出设备无法建立图像连接");
        return;
    }
    // 2. Capture a frame asynchronously. NOTE(review): the handler's queue is
    //    not documented to be the main queue — confirm before touching UI here.
    [self.captureStillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {

        // Bail out on failure instead of passing a NULL buffer to
        // +jpegStillImageNSDataRepresentation: (which would crash).
        if (error != nil || imageDataSampleBuffer == NULL) {
            NSLog(@"输出设备无法建立图像连接");
            return;
        }

        // Convert the sample buffer to JPEG data (class method), then to a
        // UIImage, then crop once — the original cropped twice, drawing the
        // image through the bitmap context two times.
        NSData *data = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [UIImage imageWithData:data];
        UIImage *croppedImage = [self cutImageWithImage:image];

        // Deliver the photo to the caller.
        if (completion) {
            completion(croppedImage);
        }

        // Optionally save to the system album.
        if (saveCompletion) {
            // The album save completes asynchronously AFTER this handler (and
            // its captured copy of saveCompletion) has been released, so a
            // plain __bridge contextInfo would dangle by the time the
            // didFinishSaving callback fires. Transfer ownership (+1) with
            // __bridge_retained on a heap copy instead; the save callback is
            // expected to balance it (TODO(review): until it does, one small
            // block copy leaks per save — strictly better than the crash).
            UIImageWriteToSavedPhotosAlbum(croppedImage,
                                           self,
                                           @selector(image:didFinishSavingWithError:contextInfo:),
                                           (__bridge_retained void *)[saveCompletion copy]);
        }
    }];
}

#pragma mark - Image cropping


/**
 Crop a full-screen camera image down to the preview view's size.

 @param image The screen-sized image the camera captured.
 @return The cropped image, rendered at the preview view's size.
 */
- (UIImage *)cutImageWithImage:(UIImage *)image
{
    // 1. Open a bitmap context at the target (preview-view) size. Scale 0
    //    means "use the device's screen scale", so the result is sharp on
    //    Retina displays — the original UIGraphicsBeginImageContext rendered
    //    at 1x and produced blurry output.
    UIGraphicsBeginImageContextWithOptions(self.preView.frame.size, NO, 0);

    // 2/3. The camera frame is screen-sized; draw it so it overflows the
    //      context equally on each side and let the context clip it down to
    //      the preview size. Use a zero-origin rect — the original used
    //      preView.frame, which shifts the crop whenever the preview view is
    //      not at its superview's origin.
    CGFloat overflowWidth = [UIScreen mainScreen].bounds.size.width - self.preView.bounds.size.width;
    CGFloat overflowHeight = [UIScreen mainScreen].bounds.size.height - self.preView.bounds.size.height;
    CGRect targetRect = (CGRect){CGPointZero, self.preView.frame.size};
    [image drawInRect:CGRectInset(targetRect, -overflowWidth * 0.5, -overflowHeight * 0.5)];

    // Announce that the context is open so observers can draw watermarks
    // into it before the image is read back.
    [[NSNotificationCenter defaultCenter] postNotificationName:kDidBenginDrawWaterImageNotification object:nil];

    // 4. Read the composed image back out of the context.
    UIImage *resultImage = UIGraphicsGetImageFromCurrentImageContext();

    // 5. Always close the context we opened.
    UIGraphicsEndImageContext();

    return resultImage;
}

// Completion callback for UIImageWriteToSavedPhotosAlbum (fixed selector
// signature). contextInfo carries the caller-supplied save-completion block.
// NOTE(review): this assumes contextInfo is a +0 __bridge'd block pointer; if
// the capture side transfers ownership (+1 via __bridge_retained), this
// should use __bridge_transfer to balance the retain — confirm against the
// call site.
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    void(^block)(NSError *error) = (__bridge void (^)(NSError *__strong))(contextInfo);

    // Guard against a NULL contextInfo instead of crashing on a nil block call.
    if (block) {
        block(error);
    }

    if (error == nil) {
        NSLog(@"保存到系统相册成功");
    }
}


// Toggle between the front and back cameras.
- (void)switchCamera
{
    // 1. Direction of the camera currently in use.
    AVCaptureDevicePosition currentPosition = self.captureDeviceInput.device.position;

    // 2. We want the camera facing the other way.
    AVCaptureDevicePosition targetPosition =
        (currentPosition == AVCaptureDevicePositionBack) ? AVCaptureDevicePositionFront
                                                         : AVCaptureDevicePositionBack;

    // Scan only video devices — +devices also returns microphones.
    AVCaptureDevice *targetDevice = nil;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == targetPosition) {
            targetDevice = device;
            break;
        }
    }

    // 3. Create the new input BEFORE touching the running session, and check
    //    the error the original discarded. If the opposite camera does not
    //    exist or the input cannot be created, keep the current camera
    //    instead of tearing down a working session.
    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [[AVCaptureDeviceInput alloc] initWithDevice:targetDevice error:&error];
    if (newInput == nil) {
        NSLog(@"Failed to switch camera: %@", error);
        return;
    }

    // Pause the session while swapping inputs; this hides the brief black
    // flash the switch would otherwise show (a flip animation could mask it
    // as well).
    [self stop];

    // 4/5. An existing input must be removed before another can be added.
    [self.captureSession removeInput:self.captureDeviceInput];
    if ([self.captureSession canAddInput:newInput]) {
        [self.captureSession addInput:newInput];
        // 6. Remember the active input.
        self.captureDeviceInput = newInput;
    } else {
        // Restore the previous camera so the session is never left without
        // any input.
        [self.captureSession addInput:self.captureDeviceInput];
    }

    // Resume capturing.
    [self start];
}


// Start the capture session so the camera begins delivering frames.
// Safe to call when the session is nil (messaging nil is a no-op).
- (void)start
{
    [self.captureSession startRunning];
}


// Stop the capture session; the camera stops delivering frames until
// -start is called again.
- (void)stop
{
    [self.captureSession stopRunning];
}

@end
