//
//  CameraFaceDetectionExampleViewController.m
//  CameraFaceDetectionExample
//
//  Created by Xiaochao Yang on 8/16/11.
//  Copyright 2011 __MyCompanyName__. All rights reserved.
//

#import "CameraFaceDetectionExampleViewController.h"

@implementation CameraFaceDetectionExampleViewController

@synthesize mImageView;
@synthesize mBtn;

// Renders a UIImage into a freshly allocated 3-channel IplImage.
// NOTE you SHOULD cvReleaseImage() for the return value when end of the code.
- (IplImage *)CreateIplImageFromUIImage:(UIImage *)image {
	// Draw the UIImage into the pixel buffer of a temporary 4-channel
	// IplImage, then strip the alpha channel and flip.
	CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
	IplImage *iplimage = cvCreateImage(cvSize(image.size.width, image.size.height), IPL_DEPTH_8U, 4);

	// IPL_DEPTH_8U == 8, so iplimage->depth doubles as bits-per-component.
	CGContextRef contextRef = CGBitmapContextCreate(iplimage->imageData, iplimage->width, iplimage->height,
                                                    iplimage->depth, iplimage->widthStep,
                                                    colorSpace, kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault);
	// Make contextRef the current UIKit context so -drawInRect: targets our buffer.
	UIGraphicsPushContext(contextRef);
	[image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)];
	// BUGFIX: balance the push above — the original never popped, leaving a
	// stale context on UIKit's graphics-context stack.
	UIGraphicsPopContext();

	CGContextRelease(contextRef);
	CGColorSpaceRelease(colorSpace);

	// Drop the alpha channel. NOTE(review): the bitmap context produced RGBA
	// bytes, and CV_BGRA2BGR is a pure alpha-strip (no channel reorder), so
	// the result actually holds R,G,B order despite the "BGR" name — confirm
	// this matches what downstream consumers expect.
	IplImage *ret = cvCreateImage(cvGetSize(iplimage), IPL_DEPTH_8U, 3);
	cvCvtColor(iplimage, ret, CV_BGRA2BGR);

	// flip_mode -1 flips around both axes to match expected orientation.
	cvFlip(ret, ret, -1);
	cvReleaseImage(&iplimage);

	return ret;
}

// Builds an autoreleased UIImage from a 3-channel BGR IplImage.
// NOTE You should convert color mode as RGB before passing to this function
- (UIImage *)UIImageFromColorImage:(IplImage *)bgr_img {
	// Work on an RGB copy so CoreGraphics sees the channels in display order.
	IplImage *rgbCopy = cvCreateImage(cvSize(bgr_img->width, bgr_img->height), bgr_img->depth, bgr_img->nChannels);
	cvCvtColor(bgr_img, rgbCopy, CV_BGR2RGB);

	// Copy the raw pixel bytes into an NSData; the data provider retains it,
	// keeping the bytes alive for the lifetime of the CGImage.
	NSData *pixelData = [NSData dataWithBytes:rgbCopy->imageData length:rgbCopy->imageSize];
	CGDataProviderRef pixelProvider = CGDataProviderCreateWithCFData((CFDataRef)pixelData);
	CGColorSpaceRef rgbSpace = CGColorSpaceCreateDeviceRGB();

	// bitsPerComponent = depth (8), bitsPerPixel = depth * channel count.
	CGImageRef cgImage = CGImageCreate(rgbCopy->width, rgbCopy->height,
									   rgbCopy->depth, rgbCopy->depth * rgbCopy->nChannels, rgbCopy->widthStep,
									   rgbSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault,
									   pixelProvider, NULL, false, kCGRenderingIntentDefault);
	UIImage *result = [UIImage imageWithCGImage:cgImage];

	// Balance every CoreGraphics create and release the local pixel copy.
	CGImageRelease(cgImage);
	CGDataProviderRelease(pixelProvider);
	CGColorSpaceRelease(rgbSpace);
	cvReleaseImage(&rgbCopy);
	return result;
}

#define MIN_NFEATURES_POINTS 5
#define MAX_CORNERS 10
#define MIN_FACE_DETECTION_SIZE 60
/* Detects the biggest face in `image` (8-bit grey) and fills `corners` with
 * good-features-to-track points inside it.
 * On input *corner_count is the capacity of `corners`; on output it is the
 * number of features found. *p_face_rect is written only when a face is seen.
 * Returns 1 on success (face found and enough features), 0 otherwise. */
int detectFaceAndFeatPoints( IplImage* image,
                           CvHaarClassifierCascade* cascade,
                           CvRect* p_face_rect, 
                           int* corner_count,
                           CvPoint2D32f* corners
                           )
{
	CvMemStorage* storage = cvCreateMemStorage(0);

	/* Scratch images: a byte mask restricting the feature search to the face,
	 * plus the two float buffers cvGoodFeaturesToTrack needs internally. */
	IplImage* mask_img = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1);
	IplImage* eig_img  = cvCreateImage(cvGetSize(image), IPL_DEPTH_32F, 1);
	IplImage* tmp_img  = cvCreateImage(cvGetSize(image), IPL_DEPTH_32F, 1);

	*corner_count = MAX_CORNERS;

	/* Fastest Haar variant: biggest object only, rough search. */
	CvSeq* faces = cvHaarDetectObjects( image, cascade, storage, 1.3, 2,
                                CV_HAAR_FIND_BIGGEST_OBJECT | CV_HAAR_DO_ROUGH_SEARCH,
                                cvSize(MIN_FACE_DETECTION_SIZE, MIN_FACE_DETECTION_SIZE), cvSize(0,0));

	if (faces->total) {
		CvRect face_rect = *(CvRect*)cvGetSeqElem( faces, 0 );

		/* Shrink the detection box (15% per side horizontally, 10% per side
		 * vertically) so the feature mask hugs the face interior. */
		float trim_ratio = 0.1;
		CvRect face_rect_trim = cvRect( face_rect.x + cvRound(face_rect.width*0.15),
                                       face_rect.y + cvRound(face_rect.height*trim_ratio),
                                       cvRound( face_rect.width*(1-0.3) ),
                                       cvRound(face_rect.height*(1-2*trim_ratio) ));

		/* Binary mask that is nonzero only inside the trimmed face box. */
		cvSet(mask_img, cvScalar(0.0,0.0,0.0,0.0), NULL);
		cvSetImageROI(mask_img, face_rect_trim);
		cvSet(mask_img, cvScalar(255.0, 0.0,0.0,0.0), NULL);
		cvResetImageROI(mask_img);

		/* Small quality level gives more tracking points; use_harris = 0
		 * keeps the default (faster) eigenvalue-based detector. */
		cvGoodFeaturesToTrack(image,
                              eig_img,
                              tmp_img,
                              corners,
                              corner_count,
                              0.05 /* quality_level */,
                              5 /* min_distance */,
                              mask_img,
                              3 /* block_size */,
                              0 /* use_harris */, 0.04);

		*p_face_rect = face_rect;
	}

	cvReleaseImage(&mask_img);
	cvReleaseImage(&eig_img);
	cvReleaseImage(&tmp_img);

	/* Success requires a face AND enough feature points to track.
	 * (faces lives in `storage`, so check it before releasing.) */
	int found = (faces->total != 0 && *corner_count >= MIN_NFEATURES_POINTS);
	cvReleaseMemStorage( &storage );
	return found ? 1 : 0;
}


- (void)dealloc
{
    // MRC: release retained state before tearing the object down.
    // BUGFIX: the original released nothing, leaking every retained ivar.
    // NOTE(review): if the repeating mTimer is still scheduled it retains
    // self, so dealloc never runs until it is invalidated (see btnPressed).
    [mTimer invalidate];
    [mTimer release];
    [timerDate release];
    // Guarded OpenCV cleanup in case capture was never stopped via the UI.
    // (The stop branch NULLs these after releasing, so no double-free.)
    if (mColorImage) cvReleaseImage(&mColorImage);
    if (mGreyImage)  cvReleaseImage(&mGreyImage);
    if (mCVCam)      cvReleaseCapture(&mCVCam);
    if (mFeatPts)    cvFree(&mFeatPts);
    // NOTE(review): assumes mImageView/mBtn are retained IBOutlet properties
    // (the 2011 template default) — confirm against the header.
    [mImageView release];
    [mBtn release];
    [super dealloc];
}

// Standard memory-warning hook; super releases the view when it has no
// superview. No app-specific caches are purged here.
- (void)didReceiveMemoryWarning
{
    // Releases the view if it doesn't have a superview.
    [super didReceiveMemoryWarning];
    
    // Release any cached data, images, etc that aren't in use.
}

#pragma mark - View lifecycle


// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad
{
    [super viewDidLoad];
    mColorImage = NULL;
    mGreyImage = NULL;
    mCVCam = NULL;
    mHaarCascade = NULL;
    timerDate = nil;
}


// Counterpart to viewDidLoad on pre-iOS-6 memory warnings; no retained
// subviews are explicitly dropped here.
- (void)viewDidUnload
{
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}

// This UI is portrait-only; reject every other orientation.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    if (interfaceOrientation == UIInterfaceOrientationPortrait) {
        return YES;
    }
    return NO;
}

// Three-state machine keyed off the button title:
// "Start Capturing" -> "Detect Face" -> "Stop Capturing" -> back to start.
-(IBAction)btnPressed:(UIButton *)sender
{
    if ([sender.titleLabel.text isEqualToString:@"Start Capturing"]) {
        
        // Initialize camera. 0 for back camera, 1 for front camera.
        mCVCam = cvCaptureFromCAM(1);
        
        // Drive frame grabbing/processing at ~30 fps.
        mTimer = [[NSTimer scheduledTimerWithTimeInterval: 1.0/30
                                                   target: self
                                                 selector:@selector(onTimer)
                                                 userInfo: nil repeats:YES] retain];
        // Initialize font for FPS display
        cvInitFont(&mFont, CV_FONT_HERSHEY_SIMPLEX, 0.6, 0.6, 0.0, 2, 8);
        
        [sender setTitle:@"Detect Face" forState:UIControlStateNormal];
    } 
    else if ([sender.titleLabel.text isEqualToString:@"Detect Face"]){
        
        // Load the Haar cascade and the feature-point scratch buffer.
        // NOTE(review): cvLoad returns NULL if the XML is missing — onTimer
        // would then pass a NULL cascade to cvHaarDetectObjects.
        NSString *cascade_path = [[NSBundle mainBundle] pathForResource:@"haarcascade_frontalface_alt2" ofType:@"xml"];
        const char* csytle_cascade_path = [cascade_path cStringUsingEncoding:NSASCIIStringEncoding];
        mHaarCascade = (CvHaarClassifierCascade*)cvLoad(csytle_cascade_path, NULL, NULL, NULL);
        mFeatPts = (CvPoint2D32f*)cvAlloc(MAX_CORNERS*sizeof(CvPoint2D32f));
        
        [sender setTitle:@"Stop Capturing" forState:UIControlStateNormal];
    } 
    else if([sender.titleLabel.text isEqualToString:@"Stop Capturing"]){
        
        // Stop the timer. BUGFIX: nil it out so the stale pointer is never
        // messaged on a later cycle (the original left it dangling).
        [mTimer invalidate];
        [mTimer release];
        mTimer = nil;
        
        // Release per-session OpenCV resources.
        // BUGFIX: the camera capture and the Haar cascade were never released
        // in the original, leaking on every start/stop cycle.
        if (mCVCam) {
            cvReleaseCapture(&mCVCam);
            mCVCam = NULL;
        }
        if (mHaarCascade) {
            cvReleaseHaarClassifierCascade(&mHaarCascade);
            mHaarCascade = NULL;
        }
        if (mColorImage) {
            cvReleaseImage(&mColorImage);
            mColorImage = NULL;
        }
        if (mGreyImage) {
            cvReleaseImage(&mGreyImage);
            mGreyImage = NULL;
        }
        [timerDate release];
        timerDate = nil;
        cvFree(&mFeatPts);  // cvFree NULLs the pointer itself
        
        // Restore UI status
        [sender setTitle:@"Start Capturing" forState:UIControlStateNormal];
        mImageView.image = [UIImage imageWithContentsOfFile: [[NSBundle mainBundle] pathForResource:@"OpenCV_Logo_with_text" ofType:@"png"]];
    }
}


// Per-frame tick (~30 fps): grab a camera frame, optionally run face/feature
// detection and draw overlays, stamp the FPS, and push the result to the UI.
- (void) onTimer{
    
    // Retrieve the next frame. BUGFIX: cvQueryFrame can return NULL (e.g.
    // camera not ready yet); the original dereferenced it unconditionally.
    IplImage *frame = cvQueryFrame(mCVCam);
    if (!frame) {
        return;
    }
    
    // Lazily allocate the working color buffer to match the frame geometry.
    if (!mColorImage) {
        mColorImage = cvCreateImage(cvSize(frame->width, frame->height), frame->depth, frame->nChannels);
    }
    
    // Mirror horizontally so the preview behaves like a mirror.
    cvFlip(frame, mColorImage, 1);
    
    // Detection runs only once the button has advanced to "Stop Capturing".
    if([mBtn.titleLabel.text isEqualToString:@"Stop Capturing"]){        
        if (!mGreyImage) {
            mGreyImage = cvCreateImage(cvSize(frame->width, frame->height), frame->depth, 1);
        }
        
        // NOTE(review): CV_RGB2GRAY assumes a 3-channel frame — confirm the
        // capture actually delivers 3 channels (not BGRA) on this device.
        cvCvtColor(mColorImage, mGreyImage, CV_RGB2GRAY);
        
        int numFeatPts = 0;
        
        // Detect face and calculate feature points inside it.
        if (detectFaceAndFeatPoints(mGreyImage, mHaarCascade, &mFaceRect, &numFeatPts, mFeatPts)) {
            
            // Draw the face rectangle...
            cvRectangle( mColorImage, cvPoint(mFaceRect.x,mFaceRect.y), cvPoint(mFaceRect.x+mFaceRect.width, mFaceRect.y+mFaceRect.height), 
                        CV_RGB(0,0,255), 3, 8, 0);
            // ...and each feature point.
            for(int i = 0; i < numFeatPts; i++) {
                cvCircle(mColorImage, cvPointFrom32f(mFeatPts[i]), 3, CV_RGB(255, 0, 0), 3, 8, 0);
            }
        }
    }
    
    // Compute and overlay FPS from the time since the previous frame.
    if(timerDate){
        NSTimeInterval timeInterval = [timerDate timeIntervalSinceNow]; // negative: elapsed since last frame
        NSLog(@"FPS: %f", -1.0/timeInterval);
        // BUGFIX: snprintf instead of memset+sprintf rules out overflow and
        // guarantees termination.
        char strFPS[100];
        snprintf(strFPS, sizeof(strFPS), "FPS: %0.3f", -1.0/timeInterval);
        cvPutText(mColorImage, strFPS, cvPoint(30,20), &mFont, cvScalar(128.0, 32.0, 56.0, 0.0));
        
        [timerDate release];
    }
    timerDate = [[NSDate date] retain];  // timestamp for the next frame's FPS
    
    // Convert to UIImage and display.
    UIImage *uiimage = [self UIImageFromColorImage:mColorImage];
    mImageView.image = uiimage;
}

@end
