// Copyright 2011 Cooliris, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import "MainViewController.h"
#import "ApplicationDelegate.h"
#import "Logging.h"

#define __DETECT_EYES_SEPARATELY__ 0

#define kPhotoSize 600  // Must be < 720px to work on iPod 4G

@implementation MainViewController

// Designated initializer: creates the object detector (sized to operate on a
// half-resolution copy of the captured photo for speed) and loads the bundled
// Haar cascade classifiers — frontal face first, then the left-eye cascade.
// Classifier order matters: detection results are indexed by load order.
- (id) init {
  self = [super init];
  if (self) {
    NSBundle* bundle = [NSBundle mainBundle];
    _objectDetector = [[ObjectDetector alloc] initWithMaximumProcessingSize:(kPhotoSize / 2)];
    NSString* path = [bundle pathForResource:@"haarcascade_frontalface_default" ofType:@"xml"];
    CHECK([_objectDetector loadClassifierFromXMLAtPath:path minimumObjectSize:40]);
    path = [bundle pathForResource:@"haarcascade_mcs_lefteye" ofType:@"xml"];
    CHECK([_objectDetector loadClassifierFromXMLAtPath:path minimumObjectSize:5]);
#if __DETECT_EYES_SEPARATELY__
    path = [bundle pathForResource:@"haarcascade_mcs_righteye" ofType:@"xml"];
    CHECK([_objectDetector loadClassifierFromXMLAtPath:path minimumObjectSize:5]);
#endif
  }
  return self;
}

// CameraViewController delegate callback: runs object detection on the captured
// photo on a background queue, draws the detection rectangles into a new image,
// then displays the result on the main thread. Interaction is disabled and a
// spinner shown for the duration of the background work.
- (void) cameraViewController:(CameraViewController*)controller didTakePhotoWithUIImage:(UIImage*)image metadata:(NSDictionary*)metadata {
  // Clear the previous result immediately so a stale photo is not shown while processing
  [(UIImageView*)self.view setImage:nil];
  [[UIApplication sharedApplication] beginIgnoringInteractionEvents];
  [[ApplicationDelegate sharedInstance] showSpinnerWithMessage:@"Processing…" animated:YES];
  // NOTE: referencing the _objectDetector ivar from the block implicitly retains
  // self for the lifetime of the async work (MRC block semantics)
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    UIImage* newImage = nil;
    CFAbsoluteTime time = CFAbsoluteTimeGetCurrent();
    NSArray* results = [_objectDetector detectObjectsInImage:image];
    LOG_VERBOSE(@"Object detection performed in %.2f seconds", CFAbsoluteTimeGetCurrent() - time);
    if (results) {
      // Render annotations into an offscreen context at 1:1 scale (opaque)
      UIGraphicsBeginImageContextWithOptions(image.size, YES, 1.0);
      [image drawAtPoint:CGPointZero];
      CGContextRef context = UIGraphicsGetCurrentContext();
      
      // Small white marker in the top-left corner (presumably a visual
      // "processed" indicator — no other use is visible here)
      CGContextSetRGBFillColor(context, 1.0, 1.0, 1.0, 1.0);
      CGContextFillRect(context, CGRectMake(0.0, 0.0, 5.0, 5.0));
      
      // Results are indexed by classifier load order (see -init):
      // index 0 = faces, index 1 = left eyes, index 2 = right eyes (if enabled)
      NSArray* faces = [results objectAtIndex:0];
      CGContextSetRGBStrokeColor(context, 0.0, 1.0, 0.0, 1.0);  // green: face candidates
      for (NSValue* face in faces) {
        CGContextStrokeRect(context, [face CGRectValue]);
      }
      
      NSArray* leftEyes = [results objectAtIndex:1];
      CGContextSetRGBStrokeColor(context, 1.0, 0.0, 0.75, 1.0);  // pink: left-eye candidates
      for (NSValue* eye in leftEyes) {
        CGContextStrokeRect(context, [eye CGRectValue]);
      }
#if __DETECT_EYES_SEPARATELY__
      NSArray* rightEyes = [results objectAtIndex:2];
      CGContextSetRGBStrokeColor(context, 0.75, 1.0, 1.0, 1.0);  // cyan: right-eye candidates
      for (NSValue* eye in rightEyes) {
        CGContextStrokeRect(context, [eye CGRectValue]);
      }
#endif
      
      // Confirmation pass: a face rect that fully contains at least one eye
      // detection (both eyes when detected separately) is considered a real
      // face and gets a thicker red outline.
      for (NSValue* face in faces) {
        CGRect faceRect = [face CGRectValue];
        BOOL leftEyeMatch = NO;
        for (NSValue* eye in leftEyes) {
          CGRect rect = [eye CGRectValue];
          if (CGRectContainsRect(faceRect, rect)) {
            leftEyeMatch = YES;
            break;
          }
        }
        BOOL rightEyeMatch = NO;
#if __DETECT_EYES_SEPARATELY__
        for (NSValue* eye in rightEyes) {
          CGRect rect = [eye CGRectValue];
          if (CGRectContainsRect(faceRect, rect)) {
            rightEyeMatch = YES;
            break;
          }
        }
        if (leftEyeMatch && rightEyeMatch)
#else
        if (leftEyeMatch)
#endif
        {
          CGContextSetRGBStrokeColor(context, 1.0, 0.0, 0.0, 1.0);
          CGContextSetLineWidth(context, 3.0);
          CGContextStrokeRect(context, faceRect);
        }
      }
      newImage = UIGraphicsGetImageFromCurrentImageContext();
      UIGraphicsEndImageContext();
    } else {
      // Detection unavailable/failed: show the unannotated photo
      newImage = image;
    }
    // All UI mutation back on the main queue; re-enable interaction last
    dispatch_async(dispatch_get_main_queue(), ^{
      [(UIImageView*)self.view setImage:newImage];
      [[ApplicationDelegate sharedInstance] hideSpinner:YES];
      [[UIApplication sharedApplication] endIgnoringInteractionEvents];
    });
  });
  
  // Dismiss the camera right away; processing continues in the background
  [self dismissModalViewControllerAnimated:YES];
}

// CameraViewController delegate callback for a failed capture: record the
// error and return to the main screen without further processing.
- (void) cameraViewController:(CameraViewController*)controller didFailTakingPhotoWithError:(NSError*)error {
  LOG_ERROR(@"Failed taking photo: %@", error);
  [self dismissModalViewControllerAnimated:YES];
}

// Tap handler installed on the camera view: triggers a capture.
// On the simulator (no camera hardware) it cycles through 5 bundled sample
// photos and feeds them straight into the success delegate callback.
- (void) _takePhoto:(UITapGestureRecognizer*)recognizer {
#if TARGET_IPHONE_SIMULATOR
  static NSUInteger counter = 0;
  // Cast to int: passing an NSUInteger for %i is a format/argument type
  // mismatch (undefined behavior on 64-bit); values are 1...5 so int is safe
  UIImage* image = [UIImage imageNamed:[NSString stringWithFormat:@"Photos/Photo-%02i.jpg", (int)(counter + 1)]];
  counter = (counter + 1) % 5;  // wrap around the 5 sample photos
  [self cameraViewController:nil didTakePhotoWithUIImage:image metadata:nil];
#else
  CameraViewController* controller = (CameraViewController*)self.modalViewController;
  if (!controller.takingPhoto) {
    [controller takePhoto];
  } else {
    DNOT_REACHED();  // a second tap while capture is in flight should be impossible
  }
#endif
}

// Tap handler on the main image view: presents the full-screen camera and
// installs a tap-to-capture recognizer on its view.
- (void) _showCamera:(UITapGestureRecognizer*)recognizer {
  CameraViewController* controller = [[CameraViewController alloc] init];
  controller.delegate = self;
  controller.squarePhotos = YES;
  controller.photoSize = kPhotoSize;
  controller.modalPresentationStyle = UIModalPresentationFullScreen;
  controller.modalTransitionStyle = UIModalTransitionStyleFlipHorizontal;
  
  // Attach the capture recognizer BEFORE releasing our reference: the original
  // code messaged controller.view after [controller release], which only worked
  // because the presentation call happened to retain the controller. Also use a
  // distinct local instead of reusing the 'recognizer' parameter.
  UITapGestureRecognizer* tapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(_takePhoto:)];
  [controller.view addGestureRecognizer:tapRecognizer];
  [tapRecognizer release];
  
  [self presentModalViewController:controller animated:YES];
  [controller release];
}

// Builds the root view: a gray aspect-fit image view that displays the
// processed photo and responds to taps by presenting the camera.
- (void) loadView {
  UIImageView* photoView = [[UIImageView alloc] init];
  photoView.contentMode = UIViewContentModeScaleAspectFit;
  photoView.backgroundColor = [UIColor grayColor];
  photoView.userInteractionEnabled = YES;  // UIImageView defaults to NO; needed for the tap recognizer
  
  UITapGestureRecognizer* tapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(_showCamera:)];
  [photoView addGestureRecognizer:tapRecognizer];
  [tapRecognizer release];
  
  self.view = photoView;
  [photoView release];
}

// iPad supports every orientation; iPhone/iPod excludes upside-down portrait.
- (BOOL) shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation {
  BOOL isPad = ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad);
  return isPad || (toInterfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
}

@end
