//
//  CIDetector+UIGraphic+ViewController.swift
//  CPT3
//
//  Created by onemade on 12/9/15.
//  Copyright © 2015 CNPC. All rights reserved.
//

import UIKit

/// Demonstrates face detection with `CIDetector` and annotates the detected
/// features (face rectangle, eyes, mouth) onto the input image using the
/// UIGraphics bitmap-context API. The first detected face is also cropped
/// out of the source image and shown in `outputImageView`.
class CIDetector_UIGraphic_ViewController: UIViewController {
    @IBOutlet weak var inputImageView: UIImageView!
    @IBOutlet weak var outputImageView: UIImageView!

    /// Runs face detection on a background queue, then — on the main queue —
    /// annotates the input image and displays a crop of the first face found.
    @IBAction func detect(sender: UIButton) {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), {
            let context = CIContext()
            let image = CIImage(CGImage: self.inputImageView.image!.CGImage!)
            // FIX: the options dictionary was inverted ([value: key]).
            // The key is CIDetectorAccuracy; the value is CIDetectorAccuracyHigh.
            let param = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
            let detector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: param)
            let features = detector.featuresInImage(image) as! [CIFaceFeature]

            // UIKit work must happen on the main queue.
            dispatch_async(dispatch_get_main_queue(), {
                self.drawImageAnnotatedWithFeatures(features)
                if !features.isEmpty {
                    // Feature bounds are already in CIImage (bottom-left origin)
                    // coordinates, so they can be used for the crop directly.
                    let faceImage = image.imageByCroppingToRect(features[0].bounds)
                    let face = UIImage(CGImage: context.createCGImage(faceImage, fromRect: faceImage.extent))
                    self.outputImageView.image = face
                }
            })
        })
    }

    /// Redraws `inputImageView.image` with every detected face outlined and
    /// its eye/mouth positions marked, then puts the annotated bitmap back
    /// into the image view. Does nothing if the image view has no image.
    func drawImageAnnotatedWithFeatures(features: [CIFaceFeature]) {
        guard let img = self.inputImageView.image else { return }

        UIGraphicsBeginImageContextWithOptions(img.size, true, 1)
        // FIX: draw into the image's own rect, not the view's bounds — the
        // bitmap context is img.size, and the feature coordinates delivered by
        // CIDetector are in image space, so both must share that space.
        img.drawInRect(CGRect(origin: CGPoint.zero, size: img.size))
        let context = UIGraphicsGetCurrentContext()

        // Core Image uses a bottom-left origin; flip the context so the
        // feature rects and points land in the right place.
        CGContextTranslateCTM(context, 0, img.size.height)
        CGContextScaleCTM(context, 1, -1)
        let scale = UIScreen.mainScreen().scale

        for feature in features {
            // Face rectangle: translucent black fill, white stroke.
            CGContextSetRGBFillColor(context, 0.0, 0.0, 0.0, 0.5)
            CGContextSetStrokeColorWithColor(context, UIColor.whiteColor().CGColor)
            CGContextSetLineWidth(context, 2.0 * scale)
            CGContextAddRect(context, feature.bounds)
            CGContextDrawPath(context, .FillStroke)

            // Feature markers: translucent red fill.
            CGContextSetRGBFillColor(context, 1.0, 0.0, 0.0, 0.4)

            if feature.hasLeftEyePosition {
                self.drawFeatureIn(context!, atPoint: feature.leftEyePosition)
            }
            if feature.hasRightEyePosition {
                self.drawFeatureIn(context!, atPoint: feature.rightEyePosition)
            }
            if feature.hasMouthPosition {
                self.drawFeatureIn(context!, atPoint: feature.mouthPosition)
            }
        }

        // FIX: snapshotting and ending the image context were inside the loop,
        // which invalidated the context after the first feature and leaked the
        // context when no faces were found. Do both exactly once, after the loop.
        self.inputImageView.image = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
    }

    /// Fills and strokes a circle of radius `5 * screenScale` centered at
    /// `featurePoint` (in the context's current — flipped — coordinate space).
    func drawFeatureIn(context: CGContextRef, atPoint featurePoint: CGPoint) {
        let radius = 5 * UIScreen.mainScreen().scale
        CGContextAddArc(context, featurePoint.x, featurePoint.y, radius, 0, CGFloat(M_PI * 2), 1)
        CGContextDrawPath(context, .FillStroke)
    }
}
