//
//  DetectViewController.m
//  CIRectangleDetectDemo
//
//  Created by fujikoli(李鑫) on 2017/8/29.
//  Copyright © 2017年 fujikoli(李鑫). All rights reserved.
//
//  核心实现类，
//  技术来源文章：https://blog.csdn.net/xx352890098/article/details/78440537
//  另一个框架：vision

#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "DetectViewController.h"
#import "MotionManager.h"

#define SafeAreaTopStatusHeight [UIApplication sharedApplication].statusBarFrame.size.height
#define ISAllSCREEN  (SafeAreaTopStatusHeight >= 44.0 ? YES : NO)
#define AllSCREEN_OFFSET 25.f

// Simple value object holding the four corner points of a detected rectangle
// after biggestRectangleInRectangles: has re-sorted them into a consistent
// order. Points are in the coordinate space of the caller's choosing.
@interface Feature : NSObject

@property (nonatomic) CGPoint topLeft;      // top-left corner
@property (nonatomic) CGPoint topRight;     // top-right corner
@property (nonatomic) CGPoint bottomRight;  // bottom-right corner
@property (nonatomic) CGPoint bottomLeft;   // bottom-left corner

@end

@implementation Feature

@end

@interface DetectViewController ()<UIImagePickerControllerDelegate, UINavigationControllerDelegate,AVCaptureVideoDataOutputSampleBufferDelegate,AVCapturePhotoCaptureDelegate>
{
    NSTimer *borderDetectKeeper;  // overlay-hide timer (its creation is currently commented out)
    BOOL takePick;                // YES once a still capture has been triggered
    
    CGFloat deltaX;               // preview width / detected image width (set in showRectangleView:)
    CGFloat deltaY;               // preview height / detected image height (set in showRectangleView:)
    CGRect previewRect;           // cached cameraView frame used for coordinate conversion
}
@property (nonatomic, strong) Camera *captureManager;// camera session wrapper
@property (nonatomic, strong) RectView *rectView;// overlay that draws the detected rectangle
@property (nonatomic, strong) CIDetector *detector;// Core Image rectangle detector
@property (nonatomic, strong) UIView *cameraView;// live camera preview container
@property (nonatomic, strong) UIView *focusView;// green tap-to-focus indicator box
@property (nonatomic, strong) UITapGestureRecognizer *tapGesture;// tap-to-focus gesture
@property (nonatomic,strong) UIView * limeteV;// "limit" frame the rectangle must fall inside
@property (nonatomic,assign) CGRect imageCutFrame;// manual crop rect (view coordinates)
@property (nonatomic,strong) NSMutableDictionary* rectCoords;// CIPerspectiveCorrection corner inputs (image coordinates)
@property (nonatomic,strong) MotionManager*motion;// device-orientation monitor
@property (nonatomic,assign) BOOL isFlatwise;// YES when the phone lies flat, face up
@property (nonatomic,strong) UILabel*testL;// "not enough light" warning label
@property (nonatomic,strong) UILabel*testL2;// "phone not flat" warning label
@property (nonatomic,strong) UIImageView*resultImageV;// shows the cropped capture result
@end

@implementation DetectViewController

// Builds the camera preview, the rectangle overlay, the tap-to-focus gesture,
// device-motion monitoring, and the red "limit" frame a detected rectangle
// must fall inside before an automatic capture is triggered.
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self.navigationController setNavigationBarHidden:YES];
    
    if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera])
    {
        // Live camera preview container.
        self.cameraView = [[UIView alloc] init];
        self.cameraView.backgroundColor = [UIColor blackColor];
        self.cameraView.frame = self.view.bounds;
        [self.view addSubview:self.cameraView];
        
        // Overlay that draws the detected rectangle's outline.
        self.rectView = [[RectView alloc] init];
        self.rectView.frame = self.cameraView.frame;
        self.rectView.hidden = YES;
        [self.view addSubview:self.rectView];
        
        // Tap-to-focus.
        _tapGesture = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(focusGesture:)];
        [self.view addGestureRecognizer:_tapGesture];
        
        // Green focus indicator box, briefly shown where the user taps.
        _focusView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, 80, 80)];
        _focusView.layer.borderWidth = 1.0;
        _focusView.layer.borderColor =[UIColor greenColor].CGColor;
        _focusView.backgroundColor = [UIColor clearColor];
        [self.view addSubview:_focusView];
        _focusView.hidden = YES;
        
        self.captureManager = [[Camera alloc]init];
        [self.captureManager actionWithBufferDelegate:self previewLayerFrame:_cameraView.bounds];
        [_cameraView.layer addSublayer:_captureManager.previewLayer];
        // Start monitoring the device orientation; auto-capture only runs
        // when the phone lies flat, face up.
        __weak typeof(self) weakSelf = self;
        [self.motion startWithResult:^(BOOL flatwise) {
            weakSelf.isFlatwise = flatwise;
            if (!flatwise) {
                weakSelf.testL2.hidden = NO;
                weakSelf.testL2.text = @"手机没放平不抓拍";
            } else {
                weakSelf.testL2.hidden = YES;
            }
        }];
        
        // "Re-crop" debug button. Local renamed from `_cancelButton` — the
        // leading underscore made it read like an ivar. `forState:` now uses
        // the named constant instead of the magic 0 (same value). The button
        // is intentionally not added to the view hierarchy at the moment.
        UIButton *cancelButton = [[UIButton alloc]initWithFrame:CGRectMake(50, self.view.bounds.size.height-70, 60, 50)];
        [cancelButton setTitle:@"反裁减" forState:UIControlStateNormal];
        cancelButton.backgroundColor = [UIColor blackColor];
        [cancelButton addTarget:self action:@selector(newCaiJian) forControlEvents:UIControlEventTouchUpInside];
//        [self.view addSubview:cancelButton];
        
    }
    // Red limit frame: all four detected corners must sit inside it before
    // an automatic capture fires (see showRectangleView:).
    CGFloat space = 20.f;
    self.limeteV = [[UIView alloc]initWithFrame:CGRectMake(space, space, self.cameraView.bounds.size.width-2*space, self.cameraView.bounds.size.height-2*space)];
    self.limeteV.layer.borderWidth = 2.f;
    self.limeteV.layer.borderColor = [UIColor redColor].CGColor;
    [self.view addSubview:self.limeteV];
}

//拍照
// Trigger a single still capture. Guarded by `takePick` so repeated detector
// hits while a capture is in flight don't fire the camera again.
-(void)takePicture{
    // Idiomatic BOOL test — `== NO` comparison is an anti-pattern because
    // BOOL is a signed char and any nonzero value is truthy.
    if (!takePick) {
        takePick = YES;
        [_captureManager takePictureWith:self];
    }
}

// Resume the capture session whenever the view is about to appear.
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    [self.captureManager startRunning];
}

// Stop the capture session when the view goes away to free the camera.
- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [self.captureManager captureStopRunning];
}

// UIImagePickerControllerDelegate — user cancelled the picker.
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker
{
    [self dismissController];
}

// Stop motion monitoring and drop the camera wrapper on teardown.
- (void)dealloc
{
    [_motion stop];
    _captureManager = nil;
}

// NOTE(review): despite its name, this only stops the capture session — it
// never dismisses the view controller. Confirm whether a dismissal call is
// intentionally missing.
- (void)dismissController
{
    [self.captureManager captureStopRunning];
}

// (Re)arm the overlay hide timer. The timer creation itself is currently
// commented out, so today this only tears down any previously scheduled one.
- (void)startOverlayHideTimer
{
    if(borderDetectKeeper) {
        [borderDetectKeeper invalidate];
        // Nil out after invalidating so the ivar never holds a dead timer
        // (an invalidated NSTimer must not be reused or invalidated again).
        borderDetectKeeper = nil;
    }
    
//    borderDetectKeeper = [NSTimer scheduledTimerWithTimeInterval:0.06
//                                                          target:self
//                                                        selector:@selector(removeBoundingBox:)
//                                                        userInfo:nil
//                                                         repeats:NO];
}

// Tap handler: focus the camera at the tapped location.
- (void)focusGesture:(UITapGestureRecognizer*)gesture{
    [self focusAtPoint:[gesture locationInView:gesture.view]];
}
//点击聚焦
// Focus the camera at `point` (view coordinates) and pulse the green focus
// box there: grow to 1.25x over 0.3s, shrink back over 0.5s, then hide.
- (void)focusAtPoint:(CGPoint)point{
    [self.captureManager focusAt:self.cameraView.bounds.size point:point];
    
    self.focusView.center = point;
    self.focusView.hidden = NO;
    [UIView animateWithDuration:0.3 animations:^{
        self.focusView.transform = CGAffineTransformMakeScale(1.25, 1.25);
    } completion:^(BOOL finished) {
        [UIView animateWithDuration:0.5 animations:^{
            self.focusView.transform = CGAffineTransformIdentity;
        } completion:^(BOOL done) {
            self.focusView.hidden = YES;
        }];
    }];
}

///AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    //扫描抓拍回调
    @autoreleasepool{
        CFDictionaryRef metadataDict = CMCopyDictionaryOfAttachments(NULL,sampleBuffer, kCMAttachmentMode_ShouldPropagate);
        NSDictionary *metadata = [[NSMutableDictionary alloc]initWithDictionary:(__bridge NSDictionary*)metadataDict];
        NSDictionary *exifMetadata = [metadata objectForKey:(NSString *) kCGImagePropertyExifDictionary];
        CGFloat brightnessValue = [[exifMetadata objectForKey:(NSString *) kCGImagePropertyExifBrightnessValue] floatValue];
        CFRelease(metadataDict);
        dispatch_async(dispatch_get_main_queue(), ^{
            if(brightnessValue <= 2.0){
                self.testL.hidden=NO;
                self.testL.text = @"光照不足不抓拍";
            }else{
                self.testL.hidden=YES;
            }
        });
        if(brightnessValue <= 2.0){
            return;
        }
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CIImage *image = [CIImage imageWithCVPixelBuffer:imageBuffer];
        CIFilter *transform = [CIFilter filterWithName:@"CIAffineTransform"];
        [transform setValue:image forKey:kCIInputImageKey];
        //旋转下照片
        NSValue *rotation = [NSValue valueWithCGAffineTransform:CGAffineTransformMakeRotation(-90 * (M_PI/180))];
        [transform setValue:rotation forKey:@"inputTransform"];
        image = [transform outputImage];
        dispatch_async(dispatch_get_main_queue(), ^{
            [self showRectangleView:image];
        });
    }
}

// Runs rectangle detection on `image`, projects the winning rectangle's
// corners into view coordinates, draws the overlay, and auto-captures when
// all four corners sit inside the limit frame and the phone is lying flat.
// Must be called on the main queue (touches UIKit).
-(void)showRectangleView:(CIImage*)image{
    // Red border = no acceptable rectangle inside the limit area yet.
    self.limeteV.layer.borderColor=[UIColor redColor].CGColor;
    
    Feature *rectangleFeature = [self biggestRectangleInRectangles:[self.detector featuresInImage:image]];
    if(rectangleFeature){
        previewRect = self.cameraView.frame;// e.g. {{0, 0}, {375, 812}}
        CGRect imageRect = image.extent;// e.g. {{0, -1920}, {1080, 1920}}
        deltaX = CGRectGetWidth(previewRect)/CGRectGetWidth(imageRect);
        deltaY = CGRectGetHeight(previewRect)/CGRectGetHeight(imageRect);
        // Convert Core Image coordinates (origin bottom-left) to UIKit
        // coordinates (origin top-left): translate up by the preview height,
        // flip the y axis, then scale from image size to preview size.
        CGAffineTransform transform2 = CGAffineTransformMakeTranslation(0.f, CGRectGetHeight(previewRect));
        transform2 = CGAffineTransformScale(transform2, 1, -1);
        transform2 = CGAffineTransformScale(transform2, deltaX, deltaY);
        
        CGPoint points[4];
        // CGPointApplyAffineTransform(): the point after the matrix transform.
        points[0] = CGPointApplyAffineTransform(rectangleFeature.topLeft, transform2);
        points[1] = CGPointApplyAffineTransform(rectangleFeature.topRight, transform2);
        points[2] = CGPointApplyAffineTransform(rectangleFeature.bottomRight, transform2);
        points[3] = CGPointApplyAffineTransform(rectangleFeature.bottomLeft, transform2);
        if(ISAllSCREEN){
            // Widen the quad horizontally on tall (notched) screens —
            // presumably compensating for preview-layer overscan; TODO confirm.
            points[0] = CGPointMake(points[0].x - AllSCREEN_OFFSET, points[0].y);
            points[3] = CGPointMake(points[3].x - AllSCREEN_OFFSET, points[3].y);
            points[1] = CGPointMake(points[1].x + AllSCREEN_OFFSET, points[1].y);
            points[2] = CGPointMake(points[2].x + AllSCREEN_OFFSET, points[2].y);
        }
        _rectView.hidden = NO;
        [_rectView drawWithPointsfirst:points[0]
                                second:points[1]
                                 thrid:points[2]
                                 forth:points[3]];
        // Auto-capture only when all four corners are inside the limit frame.
        if(CGRectContainsPoint(self.limeteV.frame, points[0])
           && CGRectContainsPoint(self.limeteV.frame, points[1])
           && CGRectContainsPoint(self.limeteV.frame, points[2])
           && CGRectContainsPoint(self.limeteV.frame, points[3])){
            
            self.imageCutFrame = CGRectMake(points[0].x, points[0].y, MAX((points[1].x - points[0].x), (points[2].x - points[3].x)), MAX((points[3].y-points[0].y), (points[2].y - points[1].y)));
            self.limeteV.layer.borderColor=[UIColor yellowColor].CGColor;
            if(self.isFlatwise){
                // Only capture when the phone lies flat, face up.
                [self takePicture];
            }
        }
    }
    // Redraw the overlay and (re)start the hide timer in every case — this
    // tail was previously duplicated verbatim in both branches.
    [self.rectView setNeedsDisplay];
    [self startOverlayHideTimer];
}
// Lazily-created image view that displays the cropped capture result,
// layered on top of the camera preview.
-(UIImageView *)resultImageV{
    if (_resultImageV == nil) {
        UIImageView *imageView = [[UIImageView alloc] initWithFrame:self.cameraView.bounds];
        imageView.contentMode = UIViewContentModeScaleAspectFill;
        imageView.backgroundColor = [UIColor grayColor];
        imageView.hidden = YES;
        [self.cameraView addSubview:imageView];
        _resultImageV = imageView;
    }
    return _resultImageV;
}
// Lazily-created Core Image rectangle detector.
// CIDetectorAccuracy        — detection accuracy level.
// CIDetectorAspectRatio     — expected aspect ratio of the rectangle.
// CIDetectorMaxFeatureCount — report at most one rectangle.
- (CIDetector *)detector{
    if (_detector == nil) {
        NSDictionary *options = @{CIDetectorAccuracy : CIDetectorAccuracyHigh,
                                  CIDetectorAspectRatio : @1.5,
                                  CIDetectorMaxFeatureCount : @1};
        _detector = [CIDetector detectorOfType:CIDetectorTypeRectangle
                                       context:nil
                                       options:options];
    }
    return _detector;
}
// Lazily-created dictionary holding the CIPerspectiveCorrection input corner
// vectors (inputTopLeft / inputTopRight / inputBottomLeft / inputBottomRight).
-(NSMutableDictionary *)rectCoords{
    if (_rectCoords == nil) {
        _rectCoords = [NSMutableDictionary dictionary];
    }
    return _rectCoords;
}
// Lazily-created device-motion monitor used to tell whether the phone is
// lying flat, face up.
-(MotionManager *)motion{
    if (_motion == nil) {
        _motion = [[MotionManager alloc] init];
    }
    return _motion;
}
// Lazily-created warning label shown when the scene is too dark to capture.
-(UILabel *)testL{
    if (_testL == nil) {
        CGFloat screenHeight = [UIScreen mainScreen].bounds.size.height;
        UILabel *label = [[UILabel alloc] initWithFrame:CGRectMake(50, screenHeight - 100, 200, 30)];
        label.textColor = [UIColor whiteColor];
        label.backgroundColor = [UIColor blackColor];
        label.numberOfLines = 0;
        label.hidden = YES;
        [self.view addSubview:label];
        _testL = label;
    }
    return _testL;
}
// Lazily-created warning label shown when the phone is not lying flat.
-(UILabel *)testL2{
    if (_testL2 == nil) {
        CGFloat screenHeight = [UIScreen mainScreen].bounds.size.height;
        UILabel *label = [[UILabel alloc] initWithFrame:CGRectMake(50, screenHeight - 50, 200, 30)];
        label.textColor = [UIColor whiteColor];
        label.backgroundColor = [UIColor blackColor];
        label.numberOfLines = 0;
        label.hidden = YES;
        [self.view addSubview:label];
        _testL2 = label;
    }
    return _testL2;
}

// Returns the rectangle with the largest half-perimeter (width + height),
// or nil when `rectangles` is empty.
- (CIRectangleFeature *)_biggestRectangleInRectangles:(NSArray *)rectangles
{
    if (![rectangles count]) return nil;
    
    // Renamed from the misspelled "halfPerimiterValue".
    CGFloat biggestHalfPerimeter = 0;
    
    CIRectangleFeature *biggestRectangle = [rectangles firstObject];
    
    for (CIRectangleFeature *rect in rectangles)
    {
        CGPoint p1 = rect.topLeft;
        CGPoint p2 = rect.topRight;
        // hypot() = Euclidean distance between two corners. Use the double
        // version: CGPoint components are CGFloat (double on 64-bit), and
        // hypotf silently truncated them to float.
        CGFloat width = hypot(p1.x - p2.x, p1.y - p2.y);
        
        CGPoint p3 = rect.topLeft;
        CGPoint p4 = rect.bottomLeft;
        CGFloat height = hypot(p3.x - p4.x, p3.y - p4.y);
        
        CGFloat currentHalfPerimeter = height + width;
        
        if (biggestHalfPerimeter < currentHalfPerimeter)
        {
            biggestHalfPerimeter = currentHalfPerimeter;
            biggestRectangle = rect;
        }
    }
    
    return biggestRectangle;
}

// Selects the biggest detected rectangle, stores padded corner coordinates
// (in Core Image space) into rectCoords for later perspective cropping, and
// returns a Feature whose corners are re-sorted by angle around the centroid
// so labelling is independent of the order the detector reported them in.
- (Feature *)biggestRectangleInRectangles:(NSArray *)rectangles
{
    // Pick the rectangle with the largest half-perimeter.
    CIRectangleFeature *rectangleFeature = [self _biggestRectangleInRectangles:rectangles];
    if (!rectangleFeature) return nil;
    // The feature's x axis matches the view's, but its y axis is inverted
    // (Core Image uses a bottom-left origin).
    CGFloat padding = 10.f;
    CGPoint bottomLeft = CGPointMake(rectangleFeature.bottomLeft.x-padding, rectangleFeature.bottomLeft.y-padding);
    CGPoint bottomRight = CGPointMake(rectangleFeature.bottomRight.x+padding, rectangleFeature.bottomRight.y-padding);
    CGPoint topLeft = CGPointMake(rectangleFeature.topLeft.x-padding, rectangleFeature.topLeft.y+padding);
    CGPoint topRight = CGPointMake(rectangleFeature.topRight.x+padding, rectangleFeature.topRight.y+padding);
    self.rectCoords[@"inputTopLeft"] = [CIVector vectorWithCGPoint:topLeft];
    self.rectCoords[@"inputTopRight"] = [CIVector vectorWithCGPoint:topRight];
    self.rectCoords[@"inputBottomLeft"] = [CIVector vectorWithCGPoint:bottomLeft];
    self.rectCoords[@"inputBottomRight"] = [CIVector vectorWithCGPoint:bottomRight];
    
    // Convert the corner points into the view's reference frame.
    NSArray *points = @[[NSValue valueWithCGPoint:rectangleFeature.topLeft],[NSValue valueWithCGPoint:rectangleFeature.topRight],[NSValue valueWithCGPoint:rectangleFeature.bottomLeft],[NSValue valueWithCGPoint:rectangleFeature.bottomRight]];
    
    // Axis-aligned bounding box of the four corners.
    CGPoint min = [points[0] CGPointValue];
    CGPoint max = min;
    for (NSValue *value in points)
    {
        CGPoint point = [value CGPointValue];
        // fminf()/fmaxf() return the smaller/larger of two floats.
        min.x = fminf(point.x, min.x);
        min.y = fminf(point.y, min.y);
        max.x = fmaxf(point.x, max.x);
        max.y = fmaxf(point.y, max.y);
    }
    
    // Center of the bounding box — the pivot for the angle sort below.
    CGPoint center =
    {
        0.5f * (min.x + max.x),
        0.5f * (min.y + max.y),
    };
    
    NSNumber *(^angleFromPoint)(id) = ^(NSValue *value)
    {
        CGPoint point = [value CGPointValue];
        CGFloat theta = atan2f(point.y - center.y, point.x - center.x);
        CGFloat angle = fmodf(M_PI - M_PI_4 + theta, 2 * M_PI);// fmodf(): remainder of dividing the first argument by the second
        return @(angle);
    };
    
    // Sort corners by their angle around the centroid.
    NSArray *sortedPoints = [points sortedArrayUsingComparator:^NSComparisonResult(id a, id b)
                             {
                                 return [angleFromPoint(a) compare:angleFromPoint(b)];
                             }];
    
    Feature *featureMutable = [Feature new];
    featureMutable.topLeft = [sortedPoints[3] CGPointValue];//bottomRight
    featureMutable.topRight = [sortedPoints[2] CGPointValue];//bottomLeft
    featureMutable.bottomRight = [sortedPoints[1] CGPointValue];//topRight
    featureMutable.bottomLeft = [sortedPoints[0] CGPointValue];//topLeft
//    NSLog(@"我要的数据1==tl%@==tr%@==bl%@==br%@",NSStringFromCGPoint(featureMutable.topLeft),NSStringFromCGPoint(featureMutable.topRight),NSStringFromCGPoint(featureMutable.bottomLeft),NSStringFromCGPoint(featureMutable.bottomRight));
    return featureMutable;
}

///AVCapturePhotoCaptureDelegate
-(void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings error:(NSError *)error{
    
    if (photoSampleBuffer) {
        NSData *data = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
        dispatch_async(dispatch_get_main_queue(), ^{
            self.resultImageV.hidden=NO;
            self.resultImageV.image = [self getCutImage:[DetectViewController fixOrientation:[UIImage imageWithData:data]]];
            [self.rectView customPan];
            NSData*data = UIImageJPEGRepresentation(self.resultImageV.image, 1.0);
            UILabel*sizeL = [[UILabel alloc]initWithFrame:CGRectMake(50, 60, 70, 30)];
            [self.resultImageV addSubview:sizeL];
            sizeL.textColor=[UIColor blackColor];
            sizeL.text = [NSString stringWithFormat:@"%.2fMB",data.length/1024.f/1024.f];
        });
    }
}
// "Re-crop" ("反裁减"): rebuilds the perspective-correction corner inputs from
// the handles the user dragged on rectView, maps them from view space back
// into image space, then re-crops the currently displayed image.
-(void)newCaiJian{
    Feature *orgFea = [[Feature alloc]init];
    orgFea.topLeft = [self.rectView getTLpoint];
    orgFea.topRight = [self.rectView getTRpoint];
    orgFea.bottomRight = [self.rectView getBRpoint];
    orgFea.bottomLeft = [self.rectView getBLpoint];
    ////// coordinate conversion — marked by the original author as not usable yet //////
    // Intended inverse of the view-space transform built in showRectangleView:.
    CGAffineTransform transform2 = CGAffineTransformMakeTranslation(0.f, -CGRectGetHeight(previewRect));
    transform2 = CGAffineTransformScale(transform2, 1, -1);
    transform2 = CGAffineTransformScale(transform2, 1.f/deltaX, 1.f/deltaY);
    orgFea.topLeft = CGPointApplyAffineTransform(orgFea.topLeft, transform2);
    orgFea.topRight = CGPointApplyAffineTransform(orgFea.topRight, transform2);
    orgFea.bottomRight = CGPointApplyAffineTransform(orgFea.bottomRight, transform2);
    orgFea.bottomLeft = CGPointApplyAffineTransform(orgFea.bottomLeft, transform2);
    
    // NOTE(review): top/bottom are deliberately swapped below — presumably
    // because image space is y-flipped relative to view space; confirm.
    self.rectCoords[@"inputTopLeft"] = [CIVector vectorWithCGPoint:orgFea.bottomLeft];
    self.rectCoords[@"inputTopRight"] = [CIVector vectorWithCGPoint:orgFea.bottomRight];
    self.rectCoords[@"inputBottomLeft"] = [CIVector vectorWithCGPoint:orgFea.topRight];
    self.rectCoords[@"inputBottomRight"] = [CIVector vectorWithCGPoint:orgFea.topLeft];
    self.resultImageV.image = [self getCutImage:self.resultImageV.image];
        
}
//照片修正
// Redraws an image so its orientation metadata becomes UIImageOrientationUp,
// returning the input untouched when it is already upright.
+ (UIImage *)fixOrientation:(UIImage*)orgImage{
    // Already upright — nothing to do.
    if (orgImage.imageOrientation == UIImageOrientationUp) {
        return orgImage;
    }
    
    CGRect drawRect = (CGRect){CGPointZero, orgImage.size};
    UIGraphicsBeginImageContextWithOptions(orgImage.size, NO, orgImage.scale);
    [orgImage drawInRect:drawRect];
    UIImage *upright = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return upright;
}

/**
 裁剪照片
 
 @return 图片
 */
-(UIImage *)getCutImage:(UIImage*)_originalImage{
    CIImage *testCIImage = [[CIImage imageWithCGImage:_originalImage.CGImage] imageByApplyingFilter:@"CIPerspectiveCorrection" withInputParameters:self.rectCoords];

    UIImage*cImage = [UIImage imageWithCIImage:testCIImage];
    return cImage;
    //算出截图位置相对图片的坐标
//    CGRect rect = self.imageCutFrame;//[self.view convertRect:self.cutFrame toView:self.view];
//    CGFloat scaleWidth = _originalImage.size.width / _cameraView.bounds.size.width ;
//    CGFloat scaleHeight = _originalImage.size.height / _cameraView.bounds.size.height;
//    CGFloat offset = 0.f;
//    if(ISAllSCREEN){
//        offset = AllSCREEN_OFFSET*2;
//    }
//    CGRect myImageRect= CGRectMake(rect.origin.x * scaleWidth + offset, rect.origin.y * scaleHeight, rect.size.width * scaleWidth - offset*2, rect.size.height * scaleHeight);
//
//    CGImageRef imagess = _originalImage.CGImage;
//    CGImageRef subImageRef = CGImageCreateWithImageInRect(imagess, myImageRect);
//    UIImage* smallImage = [UIImage imageWithCGImage:subImageRef];
//    //释放资源
//    CGImageRelease(subImageRef);
//
//    return smallImage;
}
//压缩照片
// CGDataProvider "get bytes at position" callback: reads `count` bytes of the
// asset representation (passed via `info`) starting at `position` into
// `buffer`, returning the number of bytes actually read.
static size_t getAssetBytesCallback(void *info, void *buffer, off_t position, size_t count)
{
    ALAssetRepresentation *rep = (__bridge id)info;
    
    NSError *error = nil;
    size_t countRead = [rep getBytes:(uint8_t *)buffer fromOffset:position length:count error:&error];
    
    if (countRead == 0 && error) {
        // We have no way of passing this info back to the caller, so we log it, at least.
        NSLog(@"thumbnailForAsset:maxPixelSize: got an error reading an asset: %@",
              error);
    }
    
    return countRead;
}

// CGDataProvider release callback. The info pointer is an
// ALAssetRepresentation that thumbnailForAsset:maxPixelSize: CFBridgingRetains;
// this release balances that retain.
static void releaseAssetCallback(void *info) {
    CFRelease(info);
}

//压缩图片-占用内存小，NSData *imageData = UIImageJPEGRepresentation(image, percent);这样占用内存大
// Builds a downscaled thumbnail of an asset by streaming its bytes through a
// direct-access CGDataProvider — far cheaper in memory than decoding the full
// image with UIImageJPEGRepresentation and resizing.
// Returns nil when the asset's data cannot be read.
- (UIImage *)thumbnailForAsset:(ALAsset *)asset maxPixelSize:(NSUInteger)size{
    NSParameterAssert(asset != nil);
    NSParameterAssert(size > 0);

    ALAssetRepresentation *rep = [asset defaultRepresentation];

    CGDataProviderDirectCallbacks callbacks = {
        .version = 0,
        .getBytePointer = NULL,
        .releaseBytePointer = NULL,
        .getBytesAtPosition = getAssetBytesCallback,
        .releaseInfo = releaseAssetCallback,
    };

    // CFBridgingRetain keeps `rep` alive for the provider's lifetime;
    // releaseAssetCallback balances it when the provider is destroyed.
    CGDataProviderRef provider = CGDataProviderCreateDirect((void *)CFBridgingRetain(rep),
                                                            [rep size], &callbacks);
    if (!provider) {
        return nil;
    }

    CGImageSourceRef source = CGImageSourceCreateWithDataProvider(provider, NULL);
    if (!source) {
        // Previously an unchecked NULL here crashed the CFRelease below.
        CFRelease(provider);
        return nil;
    }

    // @(size) replaces numberWithInt:, which truncated the NSUInteger.
    CGImageRef imageRef = CGImageSourceCreateThumbnailAtIndex(source, 0,
    (__bridge CFDictionaryRef)@{
        (NSString *)kCGImageSourceCreateThumbnailFromImageAlways: @YES,
        (NSString *)kCGImageSourceThumbnailMaxPixelSize : @(size),
        (NSString *)kCGImageSourceCreateThumbnailWithTransform : @YES}
                                                              );

    CFRelease(source);
    CFRelease(provider);

    if (!imageRef) {
        return nil;
    }

    UIImage *toReturn = [UIImage imageWithCGImage:imageRef];

    CGImageRelease(imageRef);

    return toReturn;
}
@end

