//
//  BBInputController.m
//  BBTouch
//
//  Created by ben smith on 5/28/08.
//  This file is part of BBTouch.
//
//  BBTouch is free software: you can redistribute it and/or modify
//  it under the terms of the GNU Lesser General Public License as published by
//  the Free Software Foundation, either version 3 of the License, or
//  (at your option) any later version.

//  BBTouch is distributed in the hope that it will be useful,
//  but WITHOUT ANY WARRANTY; without even the implied warranty of
//  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
//  GNU Lesser General Public License for more details.

//  You should have received a copy of the GNU Lesser General Public License
//  along with BBTouch.  If not, see <http://www.gnu.org/licenses/>.
// 
//  Copyright 2008 Ben Britten Smith ben@benbritten.com .
//

#import "BBInputController.h"
#import "BBBlobEventController.h"
#import "BBBlobDetector.h"
#import "BBHintedBlobDetector.h"
#import "BBFilteredDetector.h"
#import "BBRawBlobView.h"
#import "BBAverageImage.h"
#import "BBImagePreProcessor.h"
#import "CSGCamera.h"
#import "CSGImage.h"
#import "BBConfigurationController.h"
#import "BBMeshController.h"
#import "BBApplicationSupport.h"
#import "BBFilterConfigController.h"

@implementation BBInputController

@synthesize rawBlobView;
@synthesize freezeFrameImage;

id sharedInputController;

// Nib wake-up: record the shared instance, build the blob detector, set the
// initial (disabled) detection state, and subscribe to the app-wide
// notifications this controller reacts to. detectBlobs is set via KVC so any
// observers/bindings on it see the change.
-(void)awakeFromNib
{
	sharedInputController = self;

	// The filtered detector is the current pipeline; the plain and hinted
	// detectors below are kept around for experimentation.
	//blobDetector = [[BBBlobDetector alloc] init];
	blobDetector = [[BBFilteredDetector alloc] init];
	//blobDetector = [[BBHintedBlobDetector alloc] init];
	//[blobDetector setUseHints:YES];

	[self setFreezeFrame:NO];
	[self setValue:[NSNumber numberWithBool:NO] forKey:@"detectBlobs"];

	NSNotificationCenter * center = [NSNotificationCenter defaultCenter];
	[center addObserver:self selector:@selector(freezeFrameNotification:) name:@"BBFreezeFrameNotification" object:nil];
	[center addObserver:self selector:@selector(appBegin:) name:@"NSApplicationDidFinishLaunchingNotification" object:nil];
	[center addObserver:self selector:@selector(blobDetectionNotification:) name:@"BBStartBlobDetectionNotification" object:nil];
}
 
// Class-level accessor for the shared instance. The instance is recorded as a
// side effect of -awakeFromNib, so this returns nil until the nib has loaded.
// NOTE(review): the backing global (file scope, above) is not `static`, so it
// is exported from this translation unit — presumably unintentional; confirm.
+(BBInputController*)sharedInputController
{
	return sharedInputController;
}

// KVO callback. Two kinds of observations arrive here: the calibration mesh's
// bounding rect (recompute the detector's ROI) and any of the filter
// preference keys (push every filter value to the detector again — simpler
// than dispatching per key).
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
	BOOL roiChanged = [keyPath isEqualToString:@"distortionMap.sourceMesh.boundingRect"];
	if (roiChanged) {
		[blobDetector setROI:[[BBMeshController sharedMeshController] regionOfInterest]];
	} else {
		[self resetFilterSettings];
	}
}

// Pushes every filter-related preference into the detector. Invert and
// threshold apply to any detector; the kernel sizes and median offset only
// make sense on a BBFilteredDetector.
-(void)resetFilterSettings
{
	[blobDetector setInvert:[BBConfigurationController boolForSettingKey:@"BBInvertBlobCheck"]];
	[blobDetector setThreshold:[BBConfigurationController intForSettingKey:@"BBMainThreshold"]];

	if ([blobDetector isKindOfClass:[BBFilteredDetector class]]) {
		BBFilteredDetector * filtered = (BBFilteredDetector*)blobDetector;
		[filtered setHighpassValue:[BBConfigurationController intForSettingKey:@"BBHighPassKernelSize"]];
		[filtered setNoiseValue:[BBConfigurationController intForSettingKey:@"BBNoiseKernelSize"]];
		[filtered setThreshSize:[BBConfigurationController intForSettingKey:@"BBThresholdKernelSize"]];
		[filtered setThreshValue:[BBConfigurationController intForSettingKey:@"BBThresholdMedianOffset"]];
	}
}

// this is so that other parts of the app can turn on blob detection (like the calibrator)
// Notification handler for BBStartBlobDetectionNotification. Uses KVC so any
// KVO observers / bindings on detectBlobs see the change.
-(void)blobDetectionNotification:(id)sender
{
	[self setValue:[NSNumber numberWithBool:YES] forKey:@"detectBlobs"];	
}

// NSApplicationDidFinishLaunching handler. Deferred from -awakeFromNib so the
// configuration object is guaranteed to be fully set up before it is read.
// Registers all KVO observations, primes the detector with current settings,
// and spins up the camera.
-(void)appBegin:(id)n
{
	// Watch the invert pref through the config controller, and each filter
	// setting directly on the settings dictionary.
	[[BBConfigurationController sharedConfigurationController] addObserver:self forKeyPath:@"configurationSettings.BBInvertBlobCheck" options:NSKeyValueObservingOptionNew context:nil];
	NSArray * filterKeys = [NSArray arrayWithObjects:
		@"BBMainThreshold",
		@"BBHighPassKernelSize",
		@"BBNoiseKernelSize",
		@"BBThresholdKernelSize",
		@"BBThresholdMedianOffset",
		nil];
	for (NSString * settingKey in filterKeys) {
		[[BBConfigurationController configurationSettings] addObserver:self forKeyPath:settingKey options:NSKeyValueObservingOptionNew context:nil];
	}

	// The detector's ROI follows the calibration mesh's bounding rect.
	[[BBMeshController sharedMeshController] addObserver:self forKeyPath:@"distortionMap.sourceMesh.boundingRect" options:NSKeyValueObservingOptionNew context:nil];

	// Prime the detector with the current values.
	[blobDetector setROI:[[BBMeshController sharedMeshController] regionOfInterest]];
	[blobDetector setThreshold:[BBConfigurationController intForSettingKey:@"BBMainThreshold"]];
	[blobDetector setInvert:[BBConfigurationController boolForSettingKey:@"BBInvertBlobCheck"]];

	// Camera setup: settings persist in Application Support; build the path
	// on first run when no settings file exists yet.
	camera = [[CSGCamera alloc] init];
	[camera setDelegate:self];

	NSString * camSettingsFile = [BBApplicationSupport pathForApplicationSupportFileWithName:@"BBCameraSettingsData"];
	if (camSettingsFile == nil) {
		camSettingsFile = [[BBApplicationSupport applicationSupportFolder] stringByAppendingPathComponent:@"BBCameraSettingsData"];
	}
	[camera setSettingsFilePath:camSettingsFile];

	if (![camera startWithSize:[[BBConfigurationController sharedConfigurationController] rawImageSize]]) NSLog(@"could not start camera");

	// Give the camera a second to steady out, then take the initial
	// background snap via a one-shot timer.
	[NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(initialBGSnap:) userInfo:nil repeats:NO];
	[self resetFilterSettings];
}

// one shot timer selector for the startup BG snap
// Forwards to -backgroundSnap: (sender nil), which just raises the flag that
// makes the next camera frame start a background-averaging run.
- (void)initialBGSnap:(NSTimer*)theTimer
{
	[self backgroundSnap:nil];
}

////////////////////////////////////////////////////////////////////////
// main method
// Camera delegate callback, invoked once per captured frame. This is the head
// of the data pipeline: background averaging, stale-frame dropping,
// freeze-frame substitution, blob detection, and raw-video broadcast.
//
// FIX: the per-frame autorelease pool used to be leaked on every early
// return (the two background-frame paths and the stale-frame path all
// skipped [pool release]); the pool is now released on every exit path.
- (void)camera:(CSGCamera *)aCamera didReceiveFrame:(CSGImage *)aFrame;
{
	// Local pool so per-frame temporaries are collected promptly and we
	// don't bog down and get laggy performance.
	NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
	// number of frames averaged together to build the background image
	const int bgFrameCount = 10;

	// If this is a BG image run, grab the frame and get out — no need to
	// look for blobs in the BG image.
	if (backgroundSnapFlag) {
		// our first BG image: start a new averaging run
		bgCountDown = bgFrameCount;
		[averageImage release];
		averageImage = [[BBAverageImage alloc] initWithSize:[aFrame size]];
		[averageImage addImage:aFrame number:bgCountDown-- of:bgFrameCount];
		backgroundInProgress = YES;
		backgroundSnapFlag = NO;
		[pool release];  // pool was previously leaked on this path
		return;
	}
	if (backgroundInProgress) {
		// one of the subsequent BG images
		[averageImage addImage:aFrame number:bgCountDown-- of:bgFrameCount];
		if (bgCountDown < 0) {
			// all done: hand the averaged image to the blob detector
			backgroundInProgress = NO;
			[blobDetector setBgImage:averageImage];
			// now set the filter config as well
			[[BBFilterConfigController sharedFilterConfigController] setBackgroundImage:averageImage];
		}
		[pool release];  // pool was previously leaked on this path
		return;
	}

	// This is the main loop; it runs once every frame.

	// First check the sample time — effectively a minimum FPS. Frames are
	// thrown out if we get too bogged down, which keeps the interface
	// responsive when things go wrong.
	NSTimeInterval frameAge = [NSDate timeIntervalSinceReferenceDate] - [aFrame sampleTime];
	if (frameAge > 0.75) {
		NSLog(@"sample time too long throw it out: %f",frameAge);
		// if it is far too long, shut down blob detection entirely
		if (frameAge > 4.0) [self setValue:[NSNumber numberWithBool:NO] forKey:@"detectBlobs"];
		[pool release];  // pool was previously leaked on this path
		return;
	}

	// Freeze-frame hackery: latch the first frame seen while frozen and keep
	// feeding it down the existing data path. (Would be better to just set
	// the image in the view directly.)
	if (freezeFrame) {
		if (freezeFrameImage == nil) self.freezeFrameImage = aFrame;
		aFrame = (CSGImage*)self.freezeFrameImage;
	}

	// TODO: make a watchdog timer for the blob detector, so if it is taking
	// too long it can die elegantly (throw out the frame, kick up an error,
	// something)
	if (detectBlobs) {
		// do the heavy lifting
		[blobDetector analyze:aFrame];
		// Copy the blob list so it cannot change out from underneath us,
		// then send it to the event controller, which does all the
		// analyzing and tracking.
		NSArray * theseBlobs = [[blobDetector blobs] copy];
		[[BBBlobEventController sharedBlobEventController] setBlobs:theseBlobs];
		// if we are looking, send the blobs to the raw blob views as well
		if ([BBConfigurationController boolForSettingKey:@"BBShowRawBlobsFlag"]) {
			[rawBlobView setBlobs:theseBlobs];
		}
		[theseBlobs release];
	}

	// This is for the configuration objects, if they need the raw image.
	// TODO: should probably link this object with the config controller and
	// send the image that way, but for now this will do.
	if ([BBConfigurationController boolForSettingKey:@"BBShowRawVideoFlag"]) {
		[[NSNotificationCenter defaultCenter] postNotificationName:@"BBRawVideoImageChange" object:aFrame];
	}

	[pool release];
}

// Requests a fresh background snapshot: raising the flag makes the next
// camera frame start a new background-averaging run
// (see -camera:didReceiveFrame:).
-(IBAction)backgroundSnap:(id)sender
{
	// TODO: move this to a configurator only object
	backgroundSnapFlag = YES;
}

// Shows the camera's settings panel, then requests a background re-snap —
// presumably because changed camera settings invalidate the old background
// image. NOTE(review): that intent is inferred from the code; confirm.
-(IBAction)showCameraSettings:(id)sender
{
	// TODO: move this to a configurator only object
	[camera showSettings];
	backgroundSnapFlag = YES;
}

// Notification handler for BBFreezeFrameNotification: turn freeze-frame on.
// (Freeze-frame starts off; it is initialized to NO in -awakeFromNib.)
-(void)freezeFrameNotification:(id)note
{
	[self setFreezeFrame:YES];
}

// Manual accessor for the freezeFrame flag (hand-written rather than
// synthesized because the setter below has a side effect).
-(BOOL)freezeFrame
{
	return freezeFrame;
}

// Manual setter for the freezeFrame flag. Turning freeze-frame off also
// invalidates the latched freeze image, so the next frozen frame latches
// fresh (see -camera:didReceiveFrame:).
-(void)setFreezeFrame:(BOOL)b
{
	if (!b) self.freezeFrameImage = nil;
	freezeFrame = b;
}

// Balance what was acquired in -awakeFromNib and -appBegin:.
// FIX: previously only rawBlobView was released — blobDetector, camera,
// averageImage, and freezeFrameImage all leaked, and the notification
// observers registered in -awakeFromNib were never removed (a dangling
// observer would crash on the next matching notification).
// NOTE(review): the KVO observations registered in -appBegin: are not
// removed here; removing an observer that was never added throws, and
// -appBegin: may not have run — cleaning those up needs a registered
// flag. TODO: confirm and address.
- (void) dealloc
{
	[[NSNotificationCenter defaultCenter] removeObserver:self];
	[blobDetector release];
	[camera release];
	[averageImage release];
	[freezeFrameImage release];
	[rawBlobView release];
	[super dealloc];
}


@end
