//
//  QMLibManager.m
//  QMCamera
//
//  Created by guoqingwei on 16/7/20.
//  Copyright © 2016年 cvte. All rights reserved.
//

#import "QMLibManager.h"
#import "SLCameraHelper.h"
#import "HexinPaper.h"

// Class extension: private state and protocol conformances for QMLibManager.
@interface QMLibManager () <SLCameraFrameCaptureDelegate>{
    HexinPaper *hexinLib;   // Hexin detection/enhancement library instance; created once in -init.
    NSArray *paper;         // Most recent detection result; fed back into the next detectPaper: call as the "last frame" hint.
}

// Repeating timer that drives periodic detection (see -startDetect: / -stopDetect).
@property (nonatomic, strong) NSTimer *qmTimer;
// Serial queue whose thread hosts the run loop the detection timer fires on.
@property (nonatomic) dispatch_queue_t qm_queue;
// Camera wrapper used to grab frames; this manager is its capture delegate.
@property (nonatomic, strong) SLCameraHelper *cameraHelper;

@end

@implementation QMLibManager

// Returns the process-wide singleton, created lazily and thread-safely
// via dispatch_once.
+ (QMLibManager *)sharedInstance
{
    static QMLibManager *singleton = nil;
    static dispatch_once_t predicate;
    dispatch_once(&predicate, ^{
        singleton = [[QMLibManager alloc] init];
    });
    return singleton;
}

// Designated initializer: creates the serial work queue, registers this
// manager as the camera helper's frame-capture delegate, and instantiates
// the Hexin detection library.
// Returns the initialized manager (normally reached via +sharedInstance).
- (instancetype)init
{
    if (self = [super init]) {
        // Direct ivar access in init, per convention (avoid accessors here).
        _qm_queue = dispatch_queue_create("com.cvte.QMDemo", DISPATCH_QUEUE_SERIAL);
        _cameraHelper = [SLCameraHelper sharedInstance];
        _cameraHelper.captureDelegate = self;
        // `paper` is already nil — alloc zero-fills ivars, so no explicit reset.
        hexinLib = [[HexinPaper alloc] init];
    }
    return self;
}

#pragma mark - Public methods

// Starts periodic detection: every `interval` seconds -doDetect asks the
// camera helper for a frame; results come back through
// -cameraDidCaptureFrame:. Stop with -stopDetect.
// @param interval Seconds between detection ticks.
- (void)startDetect:(CGFloat)interval
{
    dispatch_async(self.qm_queue, ^{
        // Guard against double-start: without this, a second call would
        // orphan the previous repeating timer with no way to invalidate it.
        [self.qmTimer invalidate];
        // NOTE(review): a repeating NSTimer retains its target, so this
        // singleton is kept alive until -stopDetect invalidates the timer.
        self.qmTimer = [NSTimer timerWithTimeInterval:interval
                                               target:self
                                             selector:@selector(doDetect)
                                             userInfo:nil
                                              repeats:YES];
        [[NSRunLoop currentRunLoop] addTimer:self.qmTimer forMode:NSRunLoopCommonModes];
        // Keep this queue's thread spinning so its run loop fires the timer.
        [[NSRunLoop currentRunLoop] run];
    });
}

// Stops periodic detection by invalidating the repeating timer and
// dropping the reference to it.
// NOTE(review): -invalidate is documented to require the thread the timer
// was installed on; here it runs on the caller's thread while the timer
// lives on the qm_queue thread's run loop — worth confirming in practice.
- (void)stopDetect
{
    NSTimer *timer = self.qmTimer;
    self.qmTimer = nil;
    [timer invalidate];
}

// Runs paper detection on one frame via the Hexin library. The previous
// result is passed back in as the "last frame" hint so the detector can
// track the paper across frames; the new result is cached for next time.
// @param imageFrame Frame to analyze.
// @return Detected paper data as reported by the lib (corner points),
//         or nil when nothing was detected.
- (NSArray *)detectPaperInImage:(UIImage *)imageFrame
{
    NSTimeInterval begin = [NSDate timeIntervalSinceReferenceDate];

    paper = [hexinLib detectPaper:imageFrame withLastFramePaper:paper];

    NSTimeInterval end = [NSDate timeIntervalSinceReferenceDate];
    // TODO: wrap in a debug-only macro before shipping.
    NSLog(
          @"detectPaper time:%f, count=%lu, image size: %f, %f",
          end - begin, (unsigned long)[paper count],
          [imageFrame size].width, [imageFrame size].height
          );

    return paper;
}

// Detects the paper region in a whiteboard photo and asks the Hexin lib
// to produce an enhanced version. Notifies the delegate with the result
// before returning it.
// @param whiteBoardImage Source whiteboard photo.
// @return The enhanced image produced by the lib.
- (UIImage *)enhanceWhiteBoard:(UIImage *)whiteBoardImage
{
    NSLog(@"raw image size: %f, %f", [whiteBoardImage size].width, [whiteBoardImage size].height);
    NSTimeInterval begin = [NSDate timeIntervalSinceReferenceDate];
    paper = [hexinLib detectPaper:whiteBoardImage withLastFramePaper:paper];
    // FIX: pass the source image to the enhancer. The original passed nil,
    // leaving the lib with no pixels to enhance; the commented-out fallback
    // showed the image was meant to flow through.
    UIImage *resultImage = [hexinLib enhanceWhiteboard:whiteBoardImage withPaper:paper];
    NSTimeInterval end = [NSDate timeIntervalSinceReferenceDate];
    NSLog(@"enhance WhiteBoard time:%f, enhanced image size: %f, %f", end - begin,
          [resultImage size].width, [resultImage size].height);

    if ([self.delegate respondsToSelector:@selector(libManager:didReceiveEnhanceResult:)]) {
        [self.delegate libManager:self didReceiveEnhanceResult:resultImage];
    }
    return resultImage;
}

// Detects the paper region in an ordinary photo and asks the Hexin lib
// to produce an enhanced version. Notifies the delegate with the result
// before returning it.
// @param commonImage Source photo.
// @return The enhanced image produced by the lib.
- (UIImage *)enhanceCommonImage:(UIImage *)commonImage
{
    NSTimeInterval begin = [NSDate timeIntervalSinceReferenceDate];
    paper = [hexinLib detectPaper:commonImage withLastFramePaper:paper];
    // FIX: pass the source image to the enhancer. The original passed nil,
    // leaving the lib with no pixels to enhance; the commented-out fallback
    // showed the image was meant to flow through.
    UIImage *resultImage = [hexinLib enhancePhoto:commonImage withPaper:paper];
    NSTimeInterval end = [NSDate timeIntervalSinceReferenceDate];
    NSLog(@"enhance CommonImage time:%f", end - begin);

    if ([self.delegate respondsToSelector:@selector(libManager:didReceiveEnhanceResult:)]) {
        [self.delegate libManager:self didReceiveEnhanceResult:resultImage];
    }
    return resultImage;
}

// Timer tick: request one camera frame, cropped to the configured detect
// size and oriented to the current device orientation. The frame comes
// back asynchronously via -cameraDidCaptureFrame:.
- (void)doDetect
{
    UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation;
    [self.cameraHelper captureImageFrameWithOrientation:currentOrientation
                                               cropSize:self.detectSize];
}

#pragma mark - SLCameraFrameCaptureDelegate

// Camera helper callback: a frame was captured. Runs detection on it and
// forwards the outcome to the delegate — points on success, a failure
// notification otherwise. Optional delegate methods are guarded with
// respondsToSelector: checks.
- (void)cameraDidCaptureFrame:(UIImage *)imageFrame
{
    NSArray *detectedPoints = [self detectPaperInImage:imageFrame];
    if (!detectedPoints) {
        if ([self.delegate respondsToSelector:@selector(libManagerDidFailDetect:)]) {
            [self.delegate libManagerDidFailDetect:self];
        }
        return;
    }
    if ([self.delegate respondsToSelector:@selector(libManager:didSuccessDetectPoints:)]) {
        [self.delegate libManager:self didSuccessDetectPoints:detectedPoints];
    }
}

@end
