//
//  AppDelegate.m
//  OpenTouch
//
//  Created by Bridger Maxwell on 4/2/08.
//  Copyright 2008 Fiery Ferret. All rights reserved.
//

#import "AppDelegate.h"

// Forward declaration of private methods used before their implementation.
// NOTE(review): a class extension `@interface AppDelegate ()` is the modern
// idiom for this; a named private category was the common pattern of this era.
@interface AppDelegate (AppDelegate_Private)
- (void)refreshDevices;
@end


@implementation AppDelegate

@synthesize colorCorrectionFilter, smoothingFilter;

// Builds the Core Image filter chain, the blob detector, and the QTKit
// capture session, then starts capturing from the first available camera.
- (void)awakeFromNib {
	
	monoChromeFilter = [[CIFilter filterWithName:@"CIMaximumComponent"] retain];	    // Monochrome filter
	[monoChromeFilter setDefaults];                                                // set the filter to its default values
	
	smoothingFilter = [[CIFilter filterWithName:@"CINoiseReduction"] retain];	    // Noise-reduction (smoothing) filter
	[smoothingFilter setDefaults];                                                // set the filter to its default values
	
	colorCorrectionFilter = [[CIFilter filterWithName:@"CIColorControls"] retain];	    // Color filter
	[colorCorrectionFilter setDefaults];                                                // set the filter to its default values
	
	backgroundFilter = [[CIFilter filterWithName:@"CIDifferenceBlendMode"] retain];	    // Background filter by using difference blend mode
	[backgroundFilter setDefaults];
	needsNewBackground = YES;	// capture a reference background from the first frame
	
	// Blob-detection parameters and detector setup.
	threshold = 0.50f;
	dimMinX = 5;
	dimMinY = 5;
	
	blobDetector = new blobDetection::BlobDetection(640, 480, 32);
	blob = new blobDetection::Blob;
	
	blobDetector->setPosDiscrimination(false);
	blobDetector->setThreshold(threshold);
	blobDetector->setBlobDimensionMin(dimMinX, dimMinY);
	blob->setParent(blobDetector);
	
	
	// Create a capture session
	session = [[QTCaptureSession alloc] init];
	
	// Attach preview to session
	decompressedVideoOutput = [[QTCaptureVideoPreviewOutput alloc] init];
	[decompressedVideoOutput setDelegate:self];
	
	// BUG FIX: the original allocated (and leaked) an empty NSError here.
	// The error out-parameter is filled in by the callee on failure, so pass
	// a nil-initialized pointer and check the method's return value instead.
	NSError *error = nil;
	if (![session addOutput:decompressedVideoOutput error:&error]) {
		[[NSAlert alertWithError:error] runModal];
		return;
	}
	
	// Select the first attached camera, if any, and start capturing.
	NSArray *myVideoDevices = [self videoDevices];
	if ([myVideoDevices count] > 0) {
		[self setSelectedVideoDevice:[myVideoDevices objectAtIndex:0]];
	}
	
	[session startRunning];
}

// NSApplication delegate: always allow the application to quit immediately;
// teardown happens in -applicationWillTerminate:.
- (NSApplicationTerminateReply)applicationShouldTerminate:(NSApplication *)sender {
	return NSTerminateNow;
}

// NSApplication delegate: release the capture device and halt the capture
// session before the process exits.
- (void)applicationWillTerminate:(NSNotification *)aNotification {
	// Detaching the device first removes its input from the session and closes it.
	[self setSelectedVideoDevice:nil];
	
	// Then stop the session itself.
	[session stopRunning];
}


// MRC teardown: release every object retained in -awakeFromNib and by
// -refreshDevices / -setSelectedVideoDevice:.
- (void)dealloc {
	[[NSNotificationCenter defaultCenter] removeObserver:self];
	
	[decompressedVideoOutput release];
	[session release];
	[videoDeviceInput release];
	
	[videoDevices release];
	
	[backgroundFilter release];
	[monoChromeFilter release];
	// BUG FIX: smoothingFilter is retained in -awakeFromNib but was never
	// released here, leaking the CIFilter.
	[smoothingFilter release];
	[colorCorrectionFilter release];
	
	[super dealloc];
}

// Re-queries the attached video capture devices (KVO-compliant on the
// "videoDevices" key) and deselects the current device if it disappeared.
- (void)refreshDevices
{
	[self willChangeValueForKey:@"videoDevices"];
	[videoDevices release];
	// BUG FIX: +inputDevicesWithMediaType: returns an autoreleased array.
	// Storing it in the ivar without retaining it leaves a dangling pointer
	// once the autorelease pool drains, and the releases in -dealloc and on
	// the next refresh would over-release it. Take ownership explicitly.
	videoDevices = [[QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo] retain];
	[self didChangeValueForKey:@"videoDevices"];
	
	// If the previously selected device was unplugged, detach it.
	if (![videoDevices containsObject:[self selectedVideoDevice]]) {
		[self setSelectedVideoDevice:nil];
	}
}

// Lazily-populated list of attached video capture devices.
// Populated on first access via -refreshDevices.
- (NSArray *)videoDevices
{
	if (videoDevices == nil) {
		[self refreshDevices];
	}
	return videoDevices;
}

// The capture device currently feeding the session, or nil when no device
// input is attached (messaging a nil videoDeviceInput returns nil).
- (QTCaptureDevice *)selectedVideoDevice
{
	return [videoDeviceInput device];
}

// Swaps the session's capture device. Any existing device input is removed
// from the session, closed, and released first; then the new device (if
// non-nil) is opened and attached. Passing nil simply detaches the current
// device. On failure an alert is shown and the session is left with no
// video input.
- (void)setSelectedVideoDevice:(QTCaptureDevice *)selectedVideoDevice
{
	if (videoDeviceInput) {
		// Remove the old device input from the session and close the device
		[session removeInput:videoDeviceInput];
		[[videoDeviceInput device] close];
		[videoDeviceInput release];
		videoDeviceInput = nil;
	}
	
	// Drop the stale reference background — it was captured from the old device.
	[backgroundFilter setValue:nil forKey:@"inputBackgroundImage"]; 
	if (selectedVideoDevice) {
		NSError *error = nil;
		BOOL success;
		
		// Try to open the new device
		success = [selectedVideoDevice open:&error];
		if (!success) {
			[[NSAlert alertWithError:error] runModal];
			return;
		}
		
		// Create a device input for the device and add it to the session
		videoDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:selectedVideoDevice];
		
		success = [session addInput:videoDeviceInput error:&error];
		if (!success) {
			// Roll back: release our input and close the device we opened above.
			[[NSAlert alertWithError:error] runModal];
			[videoDeviceInput release];
			videoDeviceInput = nil;
			[selectedVideoDevice close];
			return;
		}
	}
	// Request a fresh background capture on the next frame from the new device.
	needsNewBackground = YES;
}


// QTCaptureVideoPreviewOutput delegate — invoked once per decompressed frame.
// Pipeline: monochrome (CIMaximumComponent) -> optional background capture ->
// background subtraction (CIDifferenceBlendMode) -> color correction ->
// display in outputView.
// NOTE(review): QTKit may deliver this callback off the main thread; the
// -setNeedsDisplay: call here assumes the view tolerates that — verify.
- (void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)videoFrame withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
{	
	CIImage* image = [CIImage imageWithCVImageBuffer:videoFrame];
	
	[monoChromeFilter setValue:image forKey:@"inputImage"];  //First we monochrome it (only black and white
	//[smoothingFilter setValue:[monoChromeFilter valueForKey:@"outputImage"] forKey:@"inputImage"];
	
	if (needsNewBackground) { //If the flag has been raised we will capture a new background
		[backgroundFilter setValue:[monoChromeFilter valueForKey:@"outputImage"] forKey:@"inputBackgroundImage"]; 
		// Only clear the flag once a real device is attached, so the first
		// frame from a newly selected camera becomes the reference background.
		if (videoDeviceInput) {
			needsNewBackground = NO;
		}
	}
	
	[backgroundFilter setValue:[monoChromeFilter valueForKey:@"outputImage"] forKey:@"inputImage"]; //Now we subtract the background from the monochrome image
	[colorCorrectionFilter setValue:[backgroundFilter valueForKey:@"outputImage"] forKey:@"inputImage"]; 
	CIImage* finalImage = [colorCorrectionFilter valueForKey:@"outputImage"];
	
	// Disabled experiment: render the frame to a raw bitmap and run the blob
	// detector over it. Left in place for future work.
//	int* imageData;
//	
//	size_t rowBytes = [self optimalRowBytesForWidth:[finalImage extent].size.width bytesPerPixel:4];
//	
//	[[[NSGraphicsContext currentContext] CIContext] render:finalImage 
//												  toBitmap:&imageData rowBytes:rowBytes
//													bounds:[finalImage extent] 
//													format:kCIFormatARGB8 
//												colorSpace:CGColorSpaceCreateWithName(kCGColorSpaceGenericGray)];
//	if (imageData) {
//		blobDetector->computeBlobs(imageData);
//		NSLog(@"On Frame %i there are %i blobs. Sample byte 5: %i",frameCount++,blobDetector->getBlobNb(),imageData[19]);
//	}
		
	[outputView setImage:finalImage];
	[outputView setNeedsDisplay:YES];
}

// Intended to draw a bounding rectangle for each detected blob onto `image`.
// FIXME: unfinished — `rectangle` is computed but never drawn, so this method
// currently has no visible effect (and the `image` parameter is unused).
- (void) drawBlobsOnCIImage:(CIImage *)image {
	for (int n=0 ; n< blobDetector->getBlobNb() ; n++)
	{
		// Copy blob n out of the detector into our scratch Blob object.
		*blob = blobDetector->getBlob(n);
		if (blob->isOk())
		{
			// Bounding box in blob coordinates — never used; see FIXME above.
			NSRect rectangle = NSMakeRect(blob->xMin, blob->yMin, blob->xMax - blob->xMin , blob->yMax - blob->yMin);
		}
	}
	
}

// Returns a row stride (in bytes) for a bitmap of the given width: the
// stride is rounded up to a multiple of 16 and then bumped until it is not
// an exact power of two.
- (size_t)optimalRowBytesForWidth: (size_t)width bytesPerPixel: (size_t)bytesPerPixel
{
    // Raw byte count for one row of pixels.
    size_t stride = width * bytesPerPixel;

    // Round up to the next multiple of 16 bytes.
    stride = (stride + 15) & ~15;

    // Step past any exact power of two (iterates a few times for stride <= 16).
    while ((stride & (stride - 1)) == 0) {
        stride += 16;
    }

    return stride;
}

// IBAction: flags the capture callback to store the next incoming frame as
// the new reference background for difference blending.
- (IBAction)setNeedsNewBackground:(id)sender
{
	needsNewBackground = YES;
}

@end
